diff --git a/be/src/cloud/cloud_base_compaction.cpp b/be/src/cloud/cloud_base_compaction.cpp index 9742e57dcf9d34..d053214e964a78 100644 --- a/be/src/cloud/cloud_base_compaction.cpp +++ b/be/src/cloud/cloud_base_compaction.cpp @@ -268,8 +268,9 @@ Status CloudBaseCompaction::execute_compact() { << ", output_version=" << _output_version; return res; } - LOG_INFO("finish CloudBaseCompaction, tablet_id={}, cost={}ms", _tablet->tablet_id(), - duration_cast(steady_clock::now() - start).count()) + LOG_INFO("finish CloudBaseCompaction, tablet_id={}, cost={}ms range=[{}-{}]", + _tablet->tablet_id(), duration_cast(steady_clock::now() - start).count(), + _input_rowsets.front()->start_version(), _input_rowsets.back()->end_version()) .tag("job_id", _uuid) .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) @@ -343,7 +344,7 @@ Status CloudBaseCompaction::modify_rowsets() { .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) .tag("input_segments", _input_segments) - .tag("update_bitmap_size", output_rowset_delete_bitmap->delete_bitmap.size()); + .tag("num_output_delete_bitmap", output_rowset_delete_bitmap->delete_bitmap.size()); compaction_job->set_delete_bitmap_lock_initiator(initiator); } diff --git a/be/src/cloud/cloud_cumulative_compaction.cpp b/be/src/cloud/cloud_cumulative_compaction.cpp index 1acf8efe32e62b..c7a82b322fb82a 100644 --- a/be/src/cloud/cloud_cumulative_compaction.cpp +++ b/be/src/cloud/cloud_cumulative_compaction.cpp @@ -204,8 +204,9 @@ Status CloudCumulativeCompaction::execute_compact() { << ", output_version=" << _output_version; return res; } - LOG_INFO("finish CloudCumulativeCompaction, tablet_id={}, cost={}ms", _tablet->tablet_id(), - duration_cast(steady_clock::now() - start).count()) + LOG_INFO("finish CloudCumulativeCompaction, tablet_id={}, cost={}ms, range=[{}-{}]", + _tablet->tablet_id(), duration_cast(steady_clock::now() - start).count(), + _input_rowsets.front()->start_version(), 
_input_rowsets.back()->end_version()) .tag("job_id", _uuid) .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) @@ -299,7 +300,8 @@ Status CloudCumulativeCompaction::modify_rowsets() { .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) .tag("input_segments", _input_segments) - .tag("update_bitmap_size", output_rowset_delete_bitmap->delete_bitmap.size()); + .tag("number_output_delete_bitmap", + output_rowset_delete_bitmap->delete_bitmap.size()); compaction_job->set_delete_bitmap_lock_initiator(initiator); } diff --git a/be/src/cloud/cloud_storage_engine.h b/be/src/cloud/cloud_storage_engine.h index 072b8366542253..2cd47c52dbeb62 100644 --- a/be/src/cloud/cloud_storage_engine.h +++ b/be/src/cloud/cloud_storage_engine.h @@ -75,7 +75,7 @@ class CloudStorageEngine final : public BaseStorageEngine { void _check_file_cache_ttl_block_valid(); std::optional get_storage_resource(const std::string& vault_id) { - LOG(INFO) << "Getting storage resource for vault_id: " << vault_id; + VLOG_DEBUG << "Getting storage resource for vault_id: " << vault_id; bool synced = false; do { diff --git a/be/src/cloud/cloud_tablet.cpp b/be/src/cloud/cloud_tablet.cpp index c7d3170726b2d5..31b7c6dd5dc8cd 100644 --- a/be/src/cloud/cloud_tablet.cpp +++ b/be/src/cloud/cloud_tablet.cpp @@ -33,6 +33,7 @@ #include "cloud/cloud_meta_mgr.h" #include "cloud/cloud_storage_engine.h" #include "cloud/cloud_tablet_mgr.h" +#include "common/logging.h" #include "io/cache/block_file_cache_downloader.h" #include "io/cache/block_file_cache_factory.h" #include "olap/cumulative_compaction_time_series_policy.h" @@ -408,6 +409,9 @@ uint64_t CloudTablet::delete_expired_stale_rowsets() { auto rs_it = _stale_rs_version_map.find(v_ts->version()); if (rs_it != _stale_rs_version_map.end()) { expired_rowsets.push_back(rs_it->second); + LOG(INFO) << "erase stale rowset, tablet_id=" << tablet_id() + << " rowset_id=" << rs_it->second->rowset_id().to_string() + << " 
version=" << rs_it->first.to_string(); _stale_rs_version_map.erase(rs_it); } else { LOG(WARNING) << "cannot find stale rowset " << v_ts->version() << " in tablet " @@ -657,11 +661,14 @@ void CloudTablet::get_compaction_status(std::string* json_result) { } void CloudTablet::set_cumulative_layer_point(int64_t new_point) { + if (new_point == Tablet::K_INVALID_CUMULATIVE_POINT || new_point >= _cumulative_point) { + _cumulative_point = new_point; + return; + } // cumulative point should only be reset to -1, or be increased - CHECK(new_point == Tablet::K_INVALID_CUMULATIVE_POINT || new_point >= _cumulative_point) - << "Unexpected cumulative point: " << new_point - << ", origin: " << _cumulative_point.load(); - _cumulative_point = new_point; + // FIXME: could happen in currently unresolved race conditions + LOG(WARNING) << "Unexpected cumulative point: " << new_point + << ", origin: " << _cumulative_point.load(); } std::vector CloudTablet::pick_candidate_rowsets_to_base_compaction() { diff --git a/be/src/clucene b/be/src/clucene index a506dbb6c523aa..2204eaec46a68e 160000 --- a/be/src/clucene +++ b/be/src/clucene @@ -1 +1 @@ -Subproject commit a506dbb6c523aa65044eb1c527a066d236172543 +Subproject commit 2204eaec46a68e5e9a1876b7021f24839ecb2cf0 diff --git a/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp b/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp index 9805163802699a..0ccff6439b802b 100644 --- a/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaActiveQueriesScanner::_s_tbls_columns = { // name, type, size {"QUERY_ID", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -92,7 +94,7 @@ Status SchemaActiveQueriesScanner::_get_active_queries_block_from_fe() { _active_query_block->reserve(_block_rows_limit); if (result_data.size() > 0) 
{ - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("active queries schema is not match for FE and BE"); } @@ -119,7 +121,7 @@ Status SchemaActiveQueriesScanner::get_next_block_internal(vectorized::Block* bl if (_active_query_block == nullptr) { RETURN_IF_ERROR(_get_active_queries_block_from_fe()); - _total_rows = _active_query_block->rows(); + _total_rows = (int)_active_query_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp b/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp index 74e95f4203217c..eb7b373c7dc7f6 100644 --- a/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp +++ b/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp @@ -25,6 +25,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaBackendActiveTasksScanner::_s_tbls_columns = { // name, type, size {"BE_ID", TYPE_BIGINT, sizeof(int64_t), false}, @@ -76,7 +78,7 @@ Status SchemaBackendActiveTasksScanner::get_next_block_internal(vectorized::Bloc ExecEnv::GetInstance()->runtime_query_statistics_mgr()->get_active_be_tasks_block( _task_stats_block.get()); - _total_rows = _task_stats_block->rows(); + _total_rows = (int)_task_stats_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp b/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp index 4c067057729f21..576ae3f9e919c7 100644 --- a/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp @@ -27,6 +27,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector 
SchemaCatalogMetaCacheStatsScanner::_s_tbls_columns = { {"CATALOG_NAME", TYPE_STRING, sizeof(StringRef), true}, {"CACHE_NAME", TYPE_STRING, sizeof(StringRef), true}, @@ -86,7 +88,7 @@ Status SchemaCatalogMetaCacheStatsScanner::_get_meta_cache_from_fe() { _block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "catalog meta cache stats schema is not match for FE and BE"); @@ -115,7 +117,7 @@ Status SchemaCatalogMetaCacheStatsScanner::get_next_block_internal(vectorized::B if (_block == nullptr) { RETURN_IF_ERROR(_get_meta_cache_from_fe()); - _total_rows = _block->rows(); + _total_rows = (int)_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_columns_scanner.cpp b/be/src/exec/schema_scanner/schema_columns_scanner.cpp index b60dfc3d203f89..2cc827a7b43e78 100644 --- a/be/src/exec/schema_scanner/schema_columns_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_columns_scanner.cpp @@ -30,6 +30,8 @@ #include "vec/common/string_ref.h" namespace doris { +#include "common/compile_check_begin.h" + class RuntimeState; namespace vectorized { @@ -411,7 +413,7 @@ Status SchemaColumnsScanner::_fill_block_impl(vectorized::Block* block) { { std::vector strs(columns_num); int offset_index = 0; - int cur_table_index = _table_index - _desc_result.tables_offset.size(); + int cur_table_index = int(_table_index - _desc_result.tables_offset.size()); for (int i = 0; i < columns_num; ++i) { while (_desc_result.tables_offset[offset_index] <= i) { @@ -609,14 +611,14 @@ Status SchemaColumnsScanner::_fill_block_impl(vectorized::Block* block) { // EXTRA { StringRef str = StringRef("", 0); - std::vector datas(columns_num, &str); - RETURN_IF_ERROR(fill_dest_column_for_range(block, 17, datas)); + std::vector filled_values(columns_num, &str); + 
RETURN_IF_ERROR(fill_dest_column_for_range(block, 17, filled_values)); } // PRIVILEGES { StringRef str = StringRef("", 0); - std::vector datas(columns_num, &str); - RETURN_IF_ERROR(fill_dest_column_for_range(block, 18, datas)); + std::vector filled_values(columns_num, &str); + RETURN_IF_ERROR(fill_dest_column_for_range(block, 18, filled_values)); } // COLUMN_COMMENT { diff --git a/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp b/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp index ecad274d218983..8a3efa0edc537c 100644 --- a/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp +++ b/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp @@ -25,6 +25,7 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" std::vector SchemaFileCacheStatisticsScanner::_s_tbls_columns = { // name, type, size @@ -68,7 +69,7 @@ Status SchemaFileCacheStatisticsScanner::get_next_block_internal(vectorized::Blo _stats_block->reserve(_block_rows_limit); ExecEnv::GetInstance()->file_cache_factory()->get_cache_stats_block(_stats_block.get()); - _total_rows = _stats_block->rows(); + _total_rows = (int)_stats_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_partitions_scanner.cpp b/be/src/exec/schema_scanner/schema_partitions_scanner.cpp index 459715fd628943..dd7919a7fe2e30 100644 --- a/be/src/exec/schema_scanner/schema_partitions_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_partitions_scanner.cpp @@ -31,6 +31,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + class RuntimeState; namespace vectorized { class Block; @@ -138,7 +140,7 @@ Status SchemaPartitionsScanner::get_onedb_info_from_fe(int64_t dbId) { } _partitions_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if 
(col_size != _s_tbls_columns.size()) { return Status::InternalError("table options schema is not match for FE and BE"); } @@ -178,7 +180,7 @@ Status SchemaPartitionsScanner::get_next_block_internal(vectorized::Block* block if (_db_index < _db_result.db_ids.size()) { RETURN_IF_ERROR(get_onedb_info_from_fe(_db_result.db_ids[_db_index])); _row_idx = 0; // reset row index so that it start filling for next block. - _total_rows = _partitions_block->rows(); + _total_rows = (int)_partitions_block->rows(); _db_index++; } } diff --git a/be/src/exec/schema_scanner/schema_processlist_scanner.cpp b/be/src/exec/schema_scanner/schema_processlist_scanner.cpp index 185ef2ab44237f..92c80262963b03 100644 --- a/be/src/exec/schema_scanner/schema_processlist_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_processlist_scanner.cpp @@ -30,6 +30,7 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" std::vector SchemaProcessListScanner::_s_processlist_columns = { {"CURRENT_CONNECTED", TYPE_VARCHAR, sizeof(StringRef), false}, @@ -126,7 +127,7 @@ Status SchemaProcessListScanner::_fill_block_impl(vectorized::Block* block) { datas[row_idx] = &int_vals[row_idx]; } else if (_s_processlist_columns[col_idx].type == TYPE_DATETIMEV2) { auto* dv = reinterpret_cast*>(&int_vals[row_idx]); - if (!dv->from_date_str(column_value.data(), column_value.size(), -1, + if (!dv->from_date_str(column_value.data(), (int)column_value.size(), -1, config::allow_zero_date)) { return Status::InternalError( "process list meet invalid data, column={}, data={}, reason={}", diff --git a/be/src/exec/schema_scanner/schema_routine_scanner.cpp b/be/src/exec/schema_scanner/schema_routine_scanner.cpp index 8660d75e8a1faf..7f16c0cddba460 100644 --- a/be/src/exec/schema_scanner/schema_routine_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_routine_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include 
"common/compile_check_begin.h" + std::vector SchemaRoutinesScanner::_s_tbls_columns = { {"SPECIFIC_NAME", TYPE_VARCHAR, sizeof(StringRef), true}, {"ROUTINE_CATALOG", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -94,7 +96,7 @@ Status SchemaRoutinesScanner::get_block_from_fe() { } _routines_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("routine table schema is not match for FE and BE"); } @@ -121,7 +123,7 @@ Status SchemaRoutinesScanner::get_next_block_internal(vectorized::Block* block, if (_routines_block == nullptr) { RETURN_IF_ERROR(get_block_from_fe()); - _total_rows = _routines_block->rows(); + _total_rows = (int)_routines_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp b/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp index 3aa0e944a822c5..aea98bd61ac89a 100644 --- a/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp @@ -48,6 +48,8 @@ namespace vectorized { class Block; } // namespace vectorized +#include "common/compile_check_begin.h" + std::vector SchemaRowsetsScanner::_s_tbls_columns = { // name, type, size, is_null {"BACKEND_ID", TYPE_BIGINT, sizeof(int64_t), true}, @@ -132,13 +134,13 @@ Status SchemaRowsetsScanner::get_next_block_internal(vectorized::Block* block, b Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { SCOPED_TIMER(_fill_block_timer); size_t fill_rowsets_num = std::min(1000UL, rowsets_.size() - _rowsets_idx); - auto fill_idx_begin = _rowsets_idx; - auto fill_idx_end = _rowsets_idx + fill_rowsets_num; + size_t fill_idx_begin = _rowsets_idx; + size_t fill_idx_end = _rowsets_idx + fill_rowsets_num; std::vector datas(fill_rowsets_num); // BACKEND_ID { int64_t src = backend_id_; - for (int i = 
fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { datas[i - fill_idx_begin] = &src; } RETURN_IF_ERROR(fill_dest_column_for_range(block, 0, datas)); @@ -147,7 +149,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { { std::vector rowset_ids(fill_rowsets_num); std::vector strs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; rowset_ids[i - fill_idx_begin] = rowset->rowset_id().to_string(); strs[i - fill_idx_begin] = StringRef(rowset_ids[i - fill_idx_begin].c_str(), @@ -159,7 +161,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // TABLET_ID { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->rowset_meta()->tablet_id(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -169,7 +171,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // ROWSET_NUM_ROWS { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->num_rows(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -179,7 +181,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // TXN_ID { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->txn_id(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -189,7 +191,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { 
// NUM_SEGMENTS { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->num_segments(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -199,7 +201,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // START_VERSION { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->start_version(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -209,7 +211,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // END_VERSION { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->end_version(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -219,7 +221,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // INDEX_DISK_SIZE { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->index_disk_size(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -229,7 +231,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // DATA_DISK_SIZE { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->data_disk_size(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -239,7 
+241,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // CREATION_TIME { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; int64_t creation_time = rowset->creation_time(); srcs[i - fill_idx_begin].from_unixtime(creation_time, TimezoneUtils::default_time_zone); @@ -250,7 +252,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // NEWEST_WRITE_TIMESTAMP { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; int64_t newest_write_timestamp = rowset->newest_write_timestamp(); srcs[i - fill_idx_begin].from_unixtime(newest_write_timestamp, @@ -262,7 +264,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // SCHEMA_VERSION { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->tablet_schema()->schema_version(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; diff --git a/be/src/exec/schema_scanner/schema_table_options_scanner.cpp b/be/src/exec/schema_scanner/schema_table_options_scanner.cpp index bb778996a83f04..fd9d17c8b93cf2 100644 --- a/be/src/exec/schema_scanner/schema_table_options_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_table_options_scanner.cpp @@ -27,6 +27,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaTableOptionsScanner::_s_tbls_columns = { {"TABLE_CATALOG", TYPE_VARCHAR, sizeof(StringRef), true}, {"TABLE_SCHEMA", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -110,7 +112,7 @@ Status 
SchemaTableOptionsScanner::get_onedb_info_from_fe(int64_t dbId) { } _tableoptions_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("table options schema is not match for FE and BE"); } @@ -150,7 +152,7 @@ Status SchemaTableOptionsScanner::get_next_block_internal(vectorized::Block* blo if (_db_index < _db_result.db_ids.size()) { RETURN_IF_ERROR(get_onedb_info_from_fe(_db_result.db_ids[_db_index])); _row_idx = 0; // reset row index so that it start filling for next block. - _total_rows = _tableoptions_block->rows(); + _total_rows = (int)_tableoptions_block->rows(); _db_index++; } } diff --git a/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp b/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp index 8d6a26a552f707..682560372b97c7 100644 --- a/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp @@ -27,6 +27,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaTablePropertiesScanner::_s_tbls_columns = { {"TABLE_CATALOG", TYPE_VARCHAR, sizeof(StringRef), true}, {"TABLE_SCHEMA", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -108,7 +110,7 @@ Status SchemaTablePropertiesScanner::get_onedb_info_from_fe(int64_t dbId) { } _tableproperties_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("table options schema is not match for FE and BE"); } @@ -148,7 +150,7 @@ Status SchemaTablePropertiesScanner::get_next_block_internal(vectorized::Block* if (_db_index < _db_result.db_ids.size()) { 
RETURN_IF_ERROR(get_onedb_info_from_fe(_db_result.db_ids[_db_index])); _row_idx = 0; // reset row index so that it start filling for next block. - _total_rows = _tableproperties_block->rows(); + _total_rows = (int)_tableproperties_block->rows(); _db_index++; } } diff --git a/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp b/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp index a91a28322ecd76..bdf306ef7d94ad 100644 --- a/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp +++ b/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaWorkloadGroupPrivilegesScanner::_s_tbls_columns = { {"GRANTEE", TYPE_VARCHAR, sizeof(StringRef), true}, {"WORKLOAD_GROUP_NAME", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -83,7 +85,7 @@ Status SchemaWorkloadGroupPrivilegesScanner::_get_workload_group_privs_block_fro } if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "workload group privileges schema is not match for FE and BE"); @@ -116,7 +118,7 @@ Status SchemaWorkloadGroupPrivilegesScanner::get_next_block_internal(vectorized: if (_workload_groups_privs_block == nullptr) { RETURN_IF_ERROR(_get_workload_group_privs_block_from_fe()); - _total_rows = _workload_groups_privs_block->rows(); + _total_rows = (int)_workload_groups_privs_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp b/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp index ca339044e98a5f..805bf12cc38ae6 100644 --- a/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp +++ 
b/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp @@ -28,6 +28,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaBackendWorkloadGroupResourceUsage::_s_tbls_columns = { // name, type, size {"BE_ID", TYPE_BIGINT, sizeof(int64_t), false}, @@ -70,7 +72,7 @@ Status SchemaBackendWorkloadGroupResourceUsage::get_next_block_internal(vectoriz } ExecEnv::GetInstance()->workload_group_mgr()->get_wg_resource_usage(_block.get()); - _total_rows = _block->rows(); + _total_rows = (int)_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp b/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp index 481360eee90557..bc5fb61669c525 100644 --- a/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaWorkloadGroupsScanner::_s_tbls_columns = { {"ID", TYPE_BIGINT, sizeof(int64_t), true}, {"NAME", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -98,7 +100,7 @@ Status SchemaWorkloadGroupsScanner::_get_workload_groups_block_from_fe() { _workload_groups_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "workload groups schema is not match for FE and BE"); @@ -127,7 +129,7 @@ Status SchemaWorkloadGroupsScanner::get_next_block_internal(vectorized::Block* b if (_workload_groups_block == nullptr) { RETURN_IF_ERROR(_get_workload_groups_block_from_fe()); - _total_rows = _workload_groups_block->rows(); + _total_rows = (int)_workload_groups_block->rows(); } if (_row_idx == _total_rows) { diff --git 
a/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp b/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp index 5c6a6f70a88a86..fa1c671f5eeea0 100644 --- a/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaWorkloadSchedulePolicyScanner::_s_tbls_columns = { {"ID", TYPE_BIGINT, sizeof(int64_t), true}, {"NAME", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -89,7 +91,7 @@ Status SchemaWorkloadSchedulePolicyScanner::_get_workload_schedule_policy_block_ _block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "workload policy schema is not match for FE and BE"); @@ -118,7 +120,7 @@ Status SchemaWorkloadSchedulePolicyScanner::get_next_block_internal(vectorized:: if (_block == nullptr) { RETURN_IF_ERROR(_get_workload_schedule_policy_block_from_fe()); - _total_rows = _block->rows(); + _total_rows = (int)_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/table_connector.cpp b/be/src/exec/table_connector.cpp index fa5181f5fecb2d..549fa6aae90fd8 100644 --- a/be/src/exec/table_connector.cpp +++ b/be/src/exec/table_connector.cpp @@ -118,16 +118,17 @@ Status TableConnector::convert_column_data(const vectorized::ColumnPtr& column_p fmt::format_to(_insert_stmt_buffer, "\"{}\"", str); } }; - const vectorized::IColumn* column = column_ptr; + const vectorized::IColumn* column = column_ptr.get(); if (type_ptr->is_nullable()) { - auto nullable_column = assert_cast(column_ptr.get()); + const auto* nullable_column = + assert_cast(column_ptr.get()); if (nullable_column->is_null_at(row)) { 
fmt::format_to(_insert_stmt_buffer, "{}", "NULL"); return Status::OK(); } column = nullable_column->get_nested_column_ptr().get(); } else { - column = column_ptr; + column = column_ptr.get(); } auto [item, size] = column->get_data_at(row); switch (type.type) { diff --git a/be/src/http/http_channel.cpp b/be/src/http/http_channel.cpp index 312f1ab9286909..598330ff7cbcfb 100644 --- a/be/src/http/http_channel.cpp +++ b/be/src/http/http_channel.cpp @@ -123,7 +123,8 @@ void HttpChannel::send_files(HttpRequest* request, const std::string& root_dir, VLOG_DEBUG << "http channel send file " << file_path << ", size: " << file_size; evbuffer_add_printf(evb.get(), "File-Name: %s\r\n", file.c_str()); - evbuffer_add_printf(evb.get(), "Content-Length: %ld\r\n", file_size); + evbuffer_add_printf(evb.get(), "Content-Length: %" PRIi64 "\r\n", file_size); + evbuffer_add_printf(evb.get(), "\r\n"); if (file_size > 0) { evbuffer_add_file(evb.get(), fd, 0, file_size); diff --git a/be/src/olap/base_tablet.cpp b/be/src/olap/base_tablet.cpp index 33275a2663b329..a4720f89d19be6 100644 --- a/be/src/olap/base_tablet.cpp +++ b/be/src/olap/base_tablet.cpp @@ -499,7 +499,7 @@ Status BaseTablet::lookup_row_key(const Slice& encoded_key, TabletSchema* latest for (auto id : picked_segments) { Status s = segments[id]->lookup_row_key(encoded_key, schema, with_seq_col, with_rowid, - &loc, encoded_seq_value, stats); + &loc, stats, encoded_seq_value); if (s.is()) { continue; } @@ -615,7 +615,7 @@ Status BaseTablet::calc_segment_delete_bitmap(RowsetSharedPtr rowset, vectorized::Block ordered_block = block.clone_empty(); uint32_t pos = 0; - RETURN_IF_ERROR(seg->load_pk_index_and_bf()); // We need index blocks to iterate + RETURN_IF_ERROR(seg->load_pk_index_and_bf(nullptr)); // We need index blocks to iterate const auto* pk_idx = seg->get_primary_key_index(); int total = pk_idx->num_rows(); uint32_t row_id = 0; @@ -629,7 +629,7 @@ Status BaseTablet::calc_segment_delete_bitmap(RowsetSharedPtr rowset, 
std::vector> segment_caches(specified_rowsets.size()); while (remaining > 0) { std::unique_ptr iter; - RETURN_IF_ERROR(pk_idx->new_iterator(&iter)); + RETURN_IF_ERROR(pk_idx->new_iterator(&iter, nullptr)); size_t num_to_read = std::min(batch_size, remaining); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( diff --git a/be/src/olap/delete_bitmap_calculator.cpp b/be/src/olap/delete_bitmap_calculator.cpp index 017e3cff3d0489..8ac05a1e393043 100644 --- a/be/src/olap/delete_bitmap_calculator.cpp +++ b/be/src/olap/delete_bitmap_calculator.cpp @@ -145,12 +145,11 @@ Status MergeIndexDeleteBitmapCalculator::init(RowsetId rowset_id, MergeIndexDeleteBitmapCalculatorContext::Comparator(seq_col_length, _rowid_length); _contexts.reserve(segments.size()); _heap = std::make_unique(_comparator); - for (auto& segment : segments) { - RETURN_IF_ERROR(segment->load_index()); + RETURN_IF_ERROR(segment->load_index(nullptr)); auto pk_idx = segment->get_primary_key_index(); std::unique_ptr index; - RETURN_IF_ERROR(pk_idx->new_iterator(&index)); + RETURN_IF_ERROR(pk_idx->new_iterator(&index, nullptr)); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( pk_idx->type_info()->type(), 1, 0); _contexts.emplace_back(std::move(index), index_type, segment->id(), pk_idx->num_rows()); diff --git a/be/src/olap/lru_cache.cpp b/be/src/olap/lru_cache.cpp index e539f4a440ab0c..9895a0138947be 100644 --- a/be/src/olap/lru_cache.cpp +++ b/be/src/olap/lru_cache.cpp @@ -604,12 +604,12 @@ ShardedLRUCache::ShardedLRUCache(const std::string& name, size_t capacity, LRUCa INT_GAUGE_METRIC_REGISTER(_entity, cache_capacity); INT_GAUGE_METRIC_REGISTER(_entity, cache_usage); INT_GAUGE_METRIC_REGISTER(_entity, cache_element_count); - INT_DOUBLE_METRIC_REGISTER(_entity, cache_usage_ratio); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_lookup_count); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_hit_count); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, 
cache_stampede_count); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_miss_count); - INT_DOUBLE_METRIC_REGISTER(_entity, cache_hit_ratio); + DOUBLE_GAUGE_METRIC_REGISTER(_entity, cache_usage_ratio); + INT_COUNTER_METRIC_REGISTER(_entity, cache_lookup_count); + INT_COUNTER_METRIC_REGISTER(_entity, cache_hit_count); + INT_COUNTER_METRIC_REGISTER(_entity, cache_stampede_count); + INT_COUNTER_METRIC_REGISTER(_entity, cache_miss_count); + DOUBLE_GAUGE_METRIC_REGISTER(_entity, cache_hit_ratio); _hit_count_bvar.reset(new bvar::Adder("doris_cache", _name)); _hit_count_per_second.reset(new bvar::PerSecond>( diff --git a/be/src/olap/lru_cache.h b/be/src/olap/lru_cache.h index 303a4cf2065ef9..4a4b6ddd0054f3 100644 --- a/be/src/olap/lru_cache.h +++ b/be/src/olap/lru_cache.h @@ -447,10 +447,10 @@ class ShardedLRUCache : public Cache { IntGauge* cache_usage = nullptr; IntGauge* cache_element_count = nullptr; DoubleGauge* cache_usage_ratio = nullptr; - IntAtomicCounter* cache_lookup_count = nullptr; - IntAtomicCounter* cache_hit_count = nullptr; - IntAtomicCounter* cache_miss_count = nullptr; - IntAtomicCounter* cache_stampede_count = nullptr; + IntCounter* cache_lookup_count = nullptr; + IntCounter* cache_hit_count = nullptr; + IntCounter* cache_miss_count = nullptr; + IntCounter* cache_stampede_count = nullptr; DoubleGauge* cache_hit_ratio = nullptr; // bvars std::unique_ptr> _hit_count_bvar; diff --git a/be/src/olap/primary_key_index.cpp b/be/src/olap/primary_key_index.cpp index 5f7bedb01fc8de..00b72832ee60e0 100644 --- a/be/src/olap/primary_key_index.cpp +++ b/be/src/olap/primary_key_index.cpp @@ -95,27 +95,29 @@ Status PrimaryKeyIndexBuilder::finalize(segment_v2::PrimaryKeyIndexMetaPB* meta) } Status PrimaryKeyIndexReader::parse_index(io::FileReaderSPtr file_reader, - const segment_v2::PrimaryKeyIndexMetaPB& meta) { + const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats) { // parse primary key index _index_reader.reset(new 
segment_v2::IndexedColumnReader(file_reader, meta.primary_key_index())); _index_reader->set_is_pk_index(true); RETURN_IF_ERROR(_index_reader->load(!config::disable_pk_storage_page_cache, false, - _pk_index_load_stats)); + pk_index_load_stats)); _index_parsed = true; return Status::OK(); } Status PrimaryKeyIndexReader::parse_bf(io::FileReaderSPtr file_reader, - const segment_v2::PrimaryKeyIndexMetaPB& meta) { + const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats) { // parse bloom filter segment_v2::ColumnIndexMetaPB column_index_meta = meta.bloom_filter_index(); segment_v2::BloomFilterIndexReader bf_index_reader(std::move(file_reader), column_index_meta.bloom_filter_index()); RETURN_IF_ERROR(bf_index_reader.load(!config::disable_pk_storage_page_cache, false, - _pk_index_load_stats)); + pk_index_load_stats)); std::unique_ptr bf_iter; - RETURN_IF_ERROR(bf_index_reader.new_iterator(&bf_iter)); + RETURN_IF_ERROR(bf_index_reader.new_iterator(&bf_iter, pk_index_load_stats)); RETURN_IF_ERROR(bf_iter->read_bloom_filter(0, &_bf)); segment_v2::g_pk_total_bloom_filter_num << 1; segment_v2::g_pk_total_bloom_filter_total_bytes << _bf->size(); diff --git a/be/src/olap/primary_key_index.h b/be/src/olap/primary_key_index.h index dcbbc5f30625f4..f74d3e42030f2f 100644 --- a/be/src/olap/primary_key_index.h +++ b/be/src/olap/primary_key_index.h @@ -98,8 +98,7 @@ class PrimaryKeyIndexBuilder { class PrimaryKeyIndexReader { public: - PrimaryKeyIndexReader(OlapReaderStatistics* pk_index_load_stats = nullptr) - : _index_parsed(false), _bf_parsed(false), _pk_index_load_stats(pk_index_load_stats) {} + PrimaryKeyIndexReader() : _index_parsed(false), _bf_parsed(false) {} ~PrimaryKeyIndexReader() { segment_v2::g_pk_total_bloom_filter_num << -static_cast(_bf_num); @@ -109,12 +108,14 @@ class PrimaryKeyIndexReader { } Status parse_index(io::FileReaderSPtr file_reader, - const segment_v2::PrimaryKeyIndexMetaPB& meta); + const 
segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats); - Status parse_bf(io::FileReaderSPtr file_reader, const segment_v2::PrimaryKeyIndexMetaPB& meta); + Status parse_bf(io::FileReaderSPtr file_reader, const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats); Status new_iterator(std::unique_ptr* index_iterator, - OlapReaderStatistics* stats = nullptr) const { + OlapReaderStatistics* stats) const { DCHECK(_index_parsed); index_iterator->reset(new segment_v2::IndexedColumnIterator(_index_reader.get(), stats)); return Status::OK(); @@ -155,7 +156,6 @@ class PrimaryKeyIndexReader { std::unique_ptr _bf; size_t _bf_num = 0; uint64 _bf_bytes = 0; - OlapReaderStatistics* _pk_index_load_stats = nullptr; }; } // namespace doris diff --git a/be/src/olap/push_handler.cpp b/be/src/olap/push_handler.cpp index 56d167459f5be7..eecb322384b698 100644 --- a/be/src/olap/push_handler.cpp +++ b/be/src/olap/push_handler.cpp @@ -518,7 +518,7 @@ Status PushBrokerReader::_convert_to_output_block(vectorized::Block* block) { column_ptr = _src_block.get_by_position(result_column_id).column; // column_ptr maybe a ColumnConst, convert it to a normal column column_ptr = column_ptr->convert_to_full_column_if_const(); - DCHECK(column_ptr != nullptr); + DCHECK(column_ptr); // because of src_slot_desc is always be nullable, so the column_ptr after do dest_expr // is likely to be nullable diff --git a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp index 8c63c25d20acee..7c51f0a24c1b1d 100644 --- a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp +++ b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp @@ -34,9 +34,8 @@ namespace segment_v2 { Status BloomFilterIndexReader::load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats) { // TODO yyq: implement a new once flag to avoid status construct. 
- _index_load_stats = index_load_stats; - return _load_once.call([this, use_page_cache, kept_in_memory] { - return _load(use_page_cache, kept_in_memory); + return _load_once.call([this, use_page_cache, kept_in_memory, index_load_stats] { + return _load(use_page_cache, kept_in_memory, index_load_stats); }); } @@ -45,20 +44,22 @@ int64_t BloomFilterIndexReader::get_metadata_size() const { (_bloom_filter_index_meta ? _bloom_filter_index_meta->ByteSizeLong() : 0); } -Status BloomFilterIndexReader::_load(bool use_page_cache, bool kept_in_memory) { +Status BloomFilterIndexReader::_load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats) { const IndexedColumnMetaPB& bf_index_meta = _bloom_filter_index_meta->bloom_filter(); _bloom_filter_reader.reset(new IndexedColumnReader(_file_reader, bf_index_meta)); - RETURN_IF_ERROR(_bloom_filter_reader->load(use_page_cache, kept_in_memory, _index_load_stats)); + RETURN_IF_ERROR(_bloom_filter_reader->load(use_page_cache, kept_in_memory, index_load_stats)); update_metadata_size(); return Status::OK(); } -Status BloomFilterIndexReader::new_iterator(std::unique_ptr* iterator) { +Status BloomFilterIndexReader::new_iterator(std::unique_ptr* iterator, + OlapReaderStatistics* index_load_stats) { DBUG_EXECUTE_IF("BloomFilterIndexReader::new_iterator.fail", { return Status::InternalError("new_iterator for bloom filter index failed"); }); - iterator->reset(new BloomFilterIndexIterator(this)); + iterator->reset(new BloomFilterIndexIterator(this, index_load_stats)); return Status::OK(); } diff --git a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h index fcb0239a2440fa..fb53af89c0fe92 100644 --- a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h +++ b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h @@ -48,17 +48,18 @@ class BloomFilterIndexReader : public MetadataAdder { } Status load(bool use_page_cache, bool kept_in_memory, 
- OlapReaderStatistics* _bf_index_load_stats = nullptr); + OlapReaderStatistics* bf_index_load_stats); BloomFilterAlgorithmPB algorithm() { return _bloom_filter_index_meta->algorithm(); } // create a new column iterator. - Status new_iterator(std::unique_ptr* iterator); + Status new_iterator(std::unique_ptr* iterator, + OlapReaderStatistics* index_load_stats); const TypeInfo* type_info() const { return _type_info; } private: - Status _load(bool use_page_cache, bool kept_in_memory); + Status _load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; @@ -70,13 +71,12 @@ class BloomFilterIndexReader : public MetadataAdder { const TypeInfo* _type_info = nullptr; std::unique_ptr _bloom_filter_index_meta = nullptr; std::unique_ptr _bloom_filter_reader; - OlapReaderStatistics* _index_load_stats = nullptr; }; class BloomFilterIndexIterator { public: - explicit BloomFilterIndexIterator(BloomFilterIndexReader* reader) - : _reader(reader), _bloom_filter_iter(reader->_bloom_filter_reader.get()) {} + explicit BloomFilterIndexIterator(BloomFilterIndexReader* reader, OlapReaderStatistics* stats) + : _reader(reader), _bloom_filter_iter(reader->_bloom_filter_reader.get(), stats) {} // Read bloom filter at the given ordinal into `bf`. 
Status read_bloom_filter(rowid_t ordinal, std::unique_ptr* bf); diff --git a/be/src/olap/rowset/segment_v2/column_reader.cpp b/be/src/olap/rowset/segment_v2/column_reader.cpp index 9d5328de869304..1abb60e58507ec 100644 --- a/be/src/olap/rowset/segment_v2/column_reader.cpp +++ b/be/src/olap/rowset/segment_v2/column_reader.cpp @@ -374,10 +374,12 @@ Status ColumnReader::read_page(const ColumnIteratorOptions& iter_opts, const Pag Status ColumnReader::get_row_ranges_by_zone_map( const AndBlockColumnPredicate* col_predicates, - const std::vector* delete_predicates, RowRanges* row_ranges) { + const std::vector* delete_predicates, RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts) { std::vector page_indexes; - RETURN_IF_ERROR(_get_filtered_pages(col_predicates, delete_predicates, &page_indexes)); - RETURN_IF_ERROR(_calculate_row_ranges(page_indexes, row_ranges)); + RETURN_IF_ERROR( + _get_filtered_pages(col_predicates, delete_predicates, &page_indexes, iter_opts)); + RETURN_IF_ERROR(_calculate_row_ranges(page_indexes, row_ranges, iter_opts)); return Status::OK(); } @@ -514,8 +516,8 @@ bool ColumnReader::_zone_map_match_condition(const ZoneMapPB& zone_map, Status ColumnReader::_get_filtered_pages( const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, - std::vector* page_indexes) { - RETURN_IF_ERROR(_load_zone_map_index(_use_index_page_cache, _opts.kept_in_memory)); + std::vector* page_indexes, const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_zone_map_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); FieldType type = _type_info->type(); const std::vector& zone_maps = _zone_map_index->page_zone_maps(); @@ -553,9 +555,10 @@ Status ColumnReader::_get_filtered_pages( } Status ColumnReader::_calculate_row_ranges(const std::vector& page_indexes, - RowRanges* row_ranges) { + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts) { row_ranges->clear(); - 
RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); for (auto i : page_indexes) { ordinal_t page_first_id = _ordinal_index->get_first_ordinal(i); ordinal_t page_last_id = _ordinal_index->get_last_ordinal(i); @@ -566,12 +569,14 @@ Status ColumnReader::_calculate_row_ranges(const std::vector& page_ind } Status ColumnReader::get_row_ranges_by_bloom_filter(const AndBlockColumnPredicate* col_predicates, - RowRanges* row_ranges) { - RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); - RETURN_IF_ERROR(_load_bloom_filter_index(_use_index_page_cache, _opts.kept_in_memory)); + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); + RETURN_IF_ERROR( + _load_bloom_filter_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); RowRanges bf_row_ranges; std::unique_ptr bf_iter; - RETURN_IF_ERROR(_bloom_filter_index->new_iterator(&bf_iter)); + RETURN_IF_ERROR(_bloom_filter_index->new_iterator(&bf_iter, iter_opts.stats)); size_t range_size = row_ranges->range_size(); // get covered page ids std::set page_ids; @@ -598,16 +603,18 @@ Status ColumnReader::get_row_ranges_by_bloom_filter(const AndBlockColumnPredicat return Status::OK(); } -Status ColumnReader::_load_ordinal_index(bool use_page_cache, bool kept_in_memory) { +Status ColumnReader::_load_ordinal_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts) { if (!_ordinal_index) { return Status::InternalError("ordinal_index not inited"); } - return _ordinal_index->load(use_page_cache, kept_in_memory); + return _ordinal_index->load(use_page_cache, kept_in_memory, iter_opts.stats); } -Status ColumnReader::_load_zone_map_index(bool use_page_cache, bool kept_in_memory) { +Status ColumnReader::_load_zone_map_index(bool 
use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts) { if (_zone_map_index != nullptr) { - return _zone_map_index->load(use_page_cache, kept_in_memory); + return _zone_map_index->load(use_page_cache, kept_in_memory, iter_opts.stats); } return Status::OK(); } @@ -681,15 +688,17 @@ bool ColumnReader::has_bloom_filter_index(bool ngram) const { } } -Status ColumnReader::_load_bloom_filter_index(bool use_page_cache, bool kept_in_memory) { +Status ColumnReader::_load_bloom_filter_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts) { if (_bloom_filter_index != nullptr) { - return _bloom_filter_index->load(use_page_cache, kept_in_memory); + return _bloom_filter_index->load(use_page_cache, kept_in_memory, iter_opts.stats); } return Status::OK(); } -Status ColumnReader::seek_to_first(OrdinalPageIndexIterator* iter) { - RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); +Status ColumnReader::seek_to_first(OrdinalPageIndexIterator* iter, + const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); *iter = _ordinal_index->begin(); if (!iter->valid()) { return Status::NotFound("Failed to seek to first rowid"); @@ -697,8 +706,9 @@ Status ColumnReader::seek_to_first(OrdinalPageIndexIterator* iter) { return Status::OK(); } -Status ColumnReader::seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter) { - RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); +Status ColumnReader::seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter, + const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); *iter = _ordinal_index->seek_at_or_before(ordinal); if (!iter->valid()) { return Status::NotFound("Failed to seek to ordinal {}, ", ordinal); @@ -1172,7 +1182,7 @@ Status 
FileColumnIterator::init(const ColumnIteratorOptions& opts) { FileColumnIterator::~FileColumnIterator() = default; Status FileColumnIterator::seek_to_first() { - RETURN_IF_ERROR(_reader->seek_to_first(&_page_iter)); + RETURN_IF_ERROR(_reader->seek_to_first(&_page_iter, _opts)); RETURN_IF_ERROR(_read_data_page(_page_iter)); _seek_to_pos_in_page(&_page, 0); @@ -1183,7 +1193,7 @@ Status FileColumnIterator::seek_to_first() { Status FileColumnIterator::seek_to_ordinal(ordinal_t ord) { // if current page contains this row, we don't need to seek if (!_page || !_page.contains(ord) || !_page_iter.valid()) { - RETURN_IF_ERROR(_reader->seek_at_or_before(ord, &_page_iter)); + RETURN_IF_ERROR(_reader->seek_at_or_before(ord, &_page_iter, _opts)); RETURN_IF_ERROR(_read_data_page(_page_iter)); } _seek_to_pos_in_page(&_page, ord - _page.first_ordinal); @@ -1257,8 +1267,8 @@ Status FileColumnIterator::next_batch(size_t* n, vectorized::MutableColumnPtr& d DCHECK_EQ(this_run, num_rows); } else { *has_null = true; - auto* null_col = - vectorized::check_and_get_column(dst); + const auto* null_col = + vectorized::check_and_get_column(dst.get()); if (null_col != nullptr) { const_cast(null_col)->insert_null_elements( this_run); @@ -1318,8 +1328,9 @@ Status FileColumnIterator::read_by_rowids(const rowid_t* rowids, const size_t co auto origin_index = _page.data_decoder->current_index(); if (this_read_count > 0) { if (is_null) { - auto* null_col = - vectorized::check_and_get_column(dst); + const auto* null_col = + vectorized::check_and_get_column( + dst.get()); if (UNLIKELY(null_col == nullptr)) { return Status::InternalError("unexpected column type in column reader"); } @@ -1431,8 +1442,8 @@ Status FileColumnIterator::get_row_ranges_by_zone_map( const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, RowRanges* row_ranges) { if (_reader->has_zone_map()) { - RETURN_IF_ERROR( - _reader->get_row_ranges_by_zone_map(col_predicates, delete_predicates, row_ranges)); + 
RETURN_IF_ERROR(_reader->get_row_ranges_by_zone_map(col_predicates, delete_predicates, + row_ranges, _opts)); } return Status::OK(); } @@ -1441,7 +1452,7 @@ Status FileColumnIterator::get_row_ranges_by_bloom_filter( const AndBlockColumnPredicate* col_predicates, RowRanges* row_ranges) { if ((col_predicates->can_do_bloom_filter(false) && _reader->has_bloom_filter_index(false)) || (col_predicates->can_do_bloom_filter(true) && _reader->has_bloom_filter_index(true))) { - RETURN_IF_ERROR(_reader->get_row_ranges_by_bloom_filter(col_predicates, row_ranges)); + RETURN_IF_ERROR(_reader->get_row_ranges_by_bloom_filter(col_predicates, row_ranges, _opts)); } return Status::OK(); } @@ -1700,9 +1711,9 @@ Status DefaultNestedColumnIterator::next_batch(size_t* n, vectorized::MutableCol static void fill_nested_with_defaults(vectorized::MutableColumnPtr& dst, vectorized::MutableColumnPtr& sibling_column, size_t nrows) { const auto* sibling_array = vectorized::check_and_get_column( - remove_nullable(sibling_column->get_ptr())); + remove_nullable(sibling_column->get_ptr()).get()); const auto* dst_array = vectorized::check_and_get_column( - remove_nullable(dst->get_ptr())); + remove_nullable(dst->get_ptr()).get()); if (!dst_array || !sibling_array) { throw doris::Exception(ErrorCode::INTERNAL_ERROR, "Expected array column, but met %s and %s", dst->get_name(), diff --git a/be/src/olap/rowset/segment_v2/column_reader.h b/be/src/olap/rowset/segment_v2/column_reader.h index d72d802f97769b..7e32b3a09b34da 100644 --- a/be/src/olap/rowset/segment_v2/column_reader.h +++ b/be/src/olap/rowset/segment_v2/column_reader.h @@ -148,8 +148,9 @@ class ColumnReader : public MetadataAdder { std::unique_ptr* iterator); // Seek to the first entry in the column. 
- Status seek_to_first(OrdinalPageIndexIterator* iter); - Status seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter); + Status seek_to_first(OrdinalPageIndexIterator* iter, const ColumnIteratorOptions& iter_opts); + Status seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter, + const ColumnIteratorOptions& iter_opts); // read a page from file into a page handle Status read_page(const ColumnIteratorOptions& iter_opts, const PagePointer& pp, @@ -175,11 +176,13 @@ class ColumnReader : public MetadataAdder { // - delete_condition is a delete predicate of one version Status get_row_ranges_by_zone_map(const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, - RowRanges* row_ranges); + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts); // get row ranges with bloom filter index Status get_row_ranges_by_bloom_filter(const AndBlockColumnPredicate* col_predicates, - RowRanges* row_ranges); + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts); PagePointer get_dict_page_pointer() const { return _meta_dict_page; } @@ -219,13 +222,16 @@ class ColumnReader : public MetadataAdder { return Status::OK(); } - [[nodiscard]] Status _load_zone_map_index(bool use_page_cache, bool kept_in_memory); - [[nodiscard]] Status _load_ordinal_index(bool use_page_cache, bool kept_in_memory); + [[nodiscard]] Status _load_zone_map_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts); + [[nodiscard]] Status _load_ordinal_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts); [[nodiscard]] Status _load_bitmap_index(bool use_page_cache, bool kept_in_memory); [[nodiscard]] Status _load_inverted_index_index( std::shared_ptr index_file_reader, const TabletIndex* index_meta); - [[nodiscard]] Status _load_bloom_filter_index(bool use_page_cache, bool kept_in_memory); + [[nodiscard]] Status _load_bloom_filter_index(bool use_page_cache, bool 
kept_in_memory, + const ColumnIteratorOptions& iter_opts); bool _zone_map_match_condition(const ZoneMapPB& zone_map, WrapperField* min_value_container, WrapperField* max_value_container, @@ -239,9 +245,11 @@ class ColumnReader : public MetadataAdder { Status _get_filtered_pages(const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, - std::vector* page_indexes); + std::vector* page_indexes, + const ColumnIteratorOptions& iter_opts); - Status _calculate_row_ranges(const std::vector& page_indexes, RowRanges* row_ranges); + Status _calculate_row_ranges(const std::vector& page_indexes, RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts); int64_t get_metadata_size() const override; diff --git a/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h b/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h index f85038713cadb7..bd5de7484740a8 100644 --- a/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h +++ b/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h @@ -165,8 +165,8 @@ class HierarchicalDataReader : public ColumnIterator { // will type the type of ColumnObject::NESTED_TYPE, whih is Nullable>. 
for (auto& entry : nested_subcolumns) { MutableColumnPtr nested_object = ColumnObject::create(true, false); - const auto* base_array = - check_and_get_column(remove_nullable(entry.second[0].column)); + const auto* base_array = check_and_get_column( + remove_nullable(entry.second[0].column).get()); MutableColumnPtr offset = base_array->get_offsets_ptr()->assume_mutable(); auto* nested_object_ptr = assert_cast(nested_object.get()); // flatten nested arrays diff --git a/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp b/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp index da6beff5d8d6a2..3f582293ee4d7f 100644 --- a/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp +++ b/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp @@ -66,7 +66,6 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats) { _use_page_cache = use_page_cache; _kept_in_memory = kept_in_memory; - _index_load_stats = index_load_stats; _type_info = get_scalar_type_info((FieldType)_meta.data_type()); if (_type_info == nullptr) { @@ -82,7 +81,7 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, } else { RETURN_IF_ERROR(load_index_page(_meta.ordinal_index_meta().root_page(), &_ordinal_index_page_handle, - _ordinal_index_reader.get())); + _ordinal_index_reader.get(), index_load_stats)); _has_index_page = true; } } @@ -93,7 +92,8 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, _sole_data_page = PagePointer(_meta.value_index_meta().root_page()); } else { RETURN_IF_ERROR(load_index_page(_meta.value_index_meta().root_page(), - &_value_index_page_handle, _value_index_reader.get())); + &_value_index_page_handle, _value_index_reader.get(), + index_load_stats)); _has_index_page = true; } } @@ -104,13 +104,14 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, } Status IndexedColumnReader::load_index_page(const PagePointerPB& pp, PageHandle* handle, 
- IndexPageReader* reader) { + IndexPageReader* reader, + OlapReaderStatistics* index_load_stats) { Slice body; PageFooterPB footer; BlockCompressionCodec* local_compress_codec; RETURN_IF_ERROR(get_block_compression_codec(_meta.compression(), &local_compress_codec)); RETURN_IF_ERROR(read_page(PagePointer(pp), handle, &body, &footer, INDEX_PAGE, - local_compress_codec, false, _index_load_stats)); + local_compress_codec, false, index_load_stats)); RETURN_IF_ERROR(reader->parse(body, footer.index_page_footer())); _mem_size += body.get_size(); return Status::OK(); diff --git a/be/src/olap/rowset/segment_v2/indexed_column_reader.h b/be/src/olap/rowset/segment_v2/indexed_column_reader.h index c9640c0007c153..6e62feaafdcdd1 100644 --- a/be/src/olap/rowset/segment_v2/indexed_column_reader.h +++ b/be/src/olap/rowset/segment_v2/indexed_column_reader.h @@ -76,7 +76,8 @@ class IndexedColumnReader : public MetadataAdder { void set_is_pk_index(bool is_pk) { _is_pk_index = is_pk; } private: - Status load_index_page(const PagePointerPB& pp, PageHandle* handle, IndexPageReader* reader); + Status load_index_page(const PagePointerPB& pp, PageHandle* handle, IndexPageReader* reader, + OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; @@ -103,7 +104,6 @@ class IndexedColumnReader : public MetadataAdder { const KeyCoder* _value_key_coder = nullptr; uint64_t _mem_size = 0; bool _is_pk_index = false; - OlapReaderStatistics* _index_load_stats = nullptr; }; class IndexedColumnIterator { diff --git a/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp b/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp index b40f9121125207..9790d7273e1bff 100644 --- a/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp +++ b/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp @@ -164,16 +164,48 @@ Status InvertedIndexReader::read_null_bitmap(const io::IOContext* io_ctx, return Status::OK(); } +Status InvertedIndexReader::handle_query_cache(RuntimeState* 
runtime_state, + InvertedIndexQueryCache* cache, + const InvertedIndexQueryCache::CacheKey& cache_key, + InvertedIndexQueryCacheHandle* cache_handler, + OlapReaderStatistics* stats, + std::shared_ptr& bit_map) { + const auto& query_options = runtime_state->query_options(); + if (query_options.enable_inverted_index_query_cache && + cache->lookup(cache_key, cache_handler)) { + DBUG_EXECUTE_IF("InvertedIndexReader.handle_query_cache_hit", { + return Status::Error("handle query cache hit"); + }); + stats->inverted_index_query_cache_hit++; + SCOPED_RAW_TIMER(&stats->inverted_index_query_bitmap_copy_timer); + bit_map = cache_handler->get_bitmap(); + return Status::OK(); + } + DBUG_EXECUTE_IF("InvertedIndexReader.handle_query_cache_miss", { + return Status::Error("handle query cache miss"); + }); + stats->inverted_index_query_cache_miss++; + return Status::Error("cache miss"); +} + Status InvertedIndexReader::handle_searcher_cache( - InvertedIndexCacheHandle* inverted_index_cache_handle, const io::IOContext* io_ctx, - OlapReaderStatistics* stats) { + RuntimeState* runtime_state, InvertedIndexCacheHandle* inverted_index_cache_handle, + const io::IOContext* io_ctx, OlapReaderStatistics* stats) { auto index_file_key = _inverted_index_file_reader->get_index_file_cache_key(&_index_meta); InvertedIndexSearcherCache::CacheKey searcher_cache_key(index_file_key); - if (InvertedIndexSearcherCache::instance()->lookup(searcher_cache_key, + const auto& query_options = runtime_state->query_options(); + if (query_options.enable_inverted_index_searcher_cache && + InvertedIndexSearcherCache::instance()->lookup(searcher_cache_key, inverted_index_cache_handle)) { + DBUG_EXECUTE_IF("InvertedIndexReader.handle_searcher_cache_hit", { + return Status::Error("handle searcher cache hit"); + }); stats->inverted_index_searcher_cache_hit++; return Status::OK(); } else { + DBUG_EXECUTE_IF("InvertedIndexReader.handle_searcher_cache_miss", { + return Status::Error("handle searcher cache miss"); + }); 
// searcher cache miss stats->inverted_index_searcher_cache_miss++; auto mem_tracker = std::make_unique("InvertedIndexSearcherCacheWithRead"); @@ -311,14 +343,16 @@ Status FullTextIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatist InvertedIndexQueryCacheHandle cache_handler; std::shared_ptr term_match_bitmap = nullptr; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { return Status::OK(); } FulltextIndexSearcherPtr* searcher_ptr = nullptr; InvertedIndexCacheHandle inverted_index_cache_handle; - RETURN_IF_ERROR(handle_searcher_cache(&inverted_index_cache_handle, io_ctx, stats)); + RETURN_IF_ERROR( + handle_searcher_cache(runtime_state, &inverted_index_cache_handle, io_ctx, stats)); auto searcher_variant = inverted_index_cache_handle.get_index_searcher(); searcher_ptr = std::get_if(&searcher_variant); if (searcher_ptr != nullptr) { @@ -379,7 +413,8 @@ Status StringTypeInvertedIndexReader::query(const io::IOContext* io_ctx, search_str}; auto* cache = InvertedIndexQueryCache::instance(); InvertedIndexQueryCacheHandle cache_handler; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { return Status::OK(); } @@ -393,7 +428,8 @@ Status StringTypeInvertedIndexReader::query(const io::IOContext* io_ctx, auto result = std::make_shared(); FulltextIndexSearcherPtr* searcher_ptr = nullptr; InvertedIndexCacheHandle inverted_index_cache_handle; - RETURN_IF_ERROR(handle_searcher_cache(&inverted_index_cache_handle, io_ctx, stats)); + RETURN_IF_ERROR( + handle_searcher_cache(runtime_state, &inverted_index_cache_handle, io_ctx, stats)); auto searcher_variant = inverted_index_cache_handle.get_index_searcher(); searcher_ptr = 
std::get_if(&searcher_variant); if (searcher_ptr != nullptr) { @@ -609,11 +645,12 @@ Status BkdIndexReader::invoke_bkd_query(const void* query_value, InvertedIndexQu } Status BkdIndexReader::try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) { + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) { try { std::shared_ptr r; - auto st = get_bkd_reader(r, io_ctx, stats); + auto st = get_bkd_reader(r, io_ctx, stats, runtime_state); if (!st.ok()) { LOG(WARNING) << "get bkd reader for " << _inverted_index_file_reader->get_index_file_path(&_index_meta) @@ -629,7 +666,8 @@ Status BkdIndexReader::try_query(const io::IOContext* io_ctx, OlapReaderStatisti auto* cache = InvertedIndexQueryCache::instance(); InvertedIndexQueryCacheHandle cache_handler; std::shared_ptr bit_map; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { *count = bit_map->cardinality(); return Status::OK(); @@ -653,7 +691,7 @@ Status BkdIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatistics* try { std::shared_ptr r; - auto st = get_bkd_reader(r, io_ctx, stats); + auto st = get_bkd_reader(r, io_ctx, stats, runtime_state); if (!st.ok()) { LOG(WARNING) << "get bkd reader for " << _inverted_index_file_reader->get_index_file_path(&_index_meta) @@ -668,7 +706,8 @@ Status BkdIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatistics* query_str}; auto* cache = InvertedIndexQueryCache::instance(); InvertedIndexQueryCacheHandle cache_handler; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, 
cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { return Status::OK(); } @@ -690,10 +729,11 @@ Status BkdIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatistics* } Status BkdIndexReader::get_bkd_reader(BKDIndexSearcherPtr& bkd_reader, const io::IOContext* io_ctx, - OlapReaderStatistics* stats) { + OlapReaderStatistics* stats, RuntimeState* runtime_state) { BKDIndexSearcherPtr* bkd_searcher = nullptr; InvertedIndexCacheHandle inverted_index_cache_handle; - RETURN_IF_ERROR(handle_searcher_cache(&inverted_index_cache_handle, io_ctx, stats)); + RETURN_IF_ERROR( + handle_searcher_cache(runtime_state, &inverted_index_cache_handle, io_ctx, stats)); auto searcher_variant = inverted_index_cache_handle.get_index_searcher(); bkd_searcher = std::get_if(&searcher_variant); if (bkd_searcher) { @@ -1138,8 +1178,8 @@ Status InvertedIndexIterator::try_read_from_inverted_index(const std::string& co query_type == InvertedIndexQueryType::LESS_EQUAL_QUERY || query_type == InvertedIndexQueryType::LESS_THAN_QUERY || query_type == InvertedIndexQueryType::EQUAL_QUERY) { - RETURN_IF_ERROR( - _reader->try_query(&_io_ctx, _stats, column_name, query_value, query_type, count)); + RETURN_IF_ERROR(_reader->try_query(&_io_ctx, _stats, _runtime_state, column_name, + query_value, query_type, count)); } return Status::OK(); } diff --git a/be/src/olap/rowset/segment_v2/inverted_index_reader.h b/be/src/olap/rowset/segment_v2/inverted_index_reader.h index a1445603286619..bbd148fae5250d 100644 --- a/be/src/olap/rowset/segment_v2/inverted_index_reader.h +++ b/be/src/olap/rowset/segment_v2/inverted_index_reader.h @@ -190,8 +190,9 @@ class InvertedIndexReader : public std::enable_shared_from_this& bit_map) = 0; virtual Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) = 0; + RuntimeState* runtime_state, const std::string& column_name, + 
const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) = 0; Status read_null_bitmap(const io::IOContext* io_ctx, OlapReaderStatistics* stats, InvertedIndexQueryCacheHandle* cache_handle, @@ -208,22 +209,14 @@ class InvertedIndexReader : public std::enable_shared_from_this& bit_map) { - if (cache->lookup(cache_key, cache_handler)) { - stats->inverted_index_query_cache_hit++; - SCOPED_RAW_TIMER(&stats->inverted_index_query_bitmap_copy_timer); - bit_map = cache_handler->get_bitmap(); - return Status::OK(); - } - stats->inverted_index_query_cache_miss++; - return Status::Error("cache miss"); - } + std::shared_ptr& bit_map); - virtual Status handle_searcher_cache(InvertedIndexCacheHandle* inverted_index_cache_handle, + virtual Status handle_searcher_cache(RuntimeState* runtime_state, + InvertedIndexCacheHandle* inverted_index_cache_handle, const io::IOContext* io_ctx, OlapReaderStatistics* stats); std::string get_index_file_path(); static Status create_index_searcher(lucene::store::Directory* dir, IndexSearcherPtr* searcher, @@ -262,8 +255,9 @@ class FullTextIndexReader : public InvertedIndexReader { const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr& bit_map) override; Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) override { + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) override { return Status::Error( "FullTextIndexReader not support try_query"); } @@ -289,8 +283,9 @@ class StringTypeInvertedIndexReader : public InvertedIndexReader { const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr& bit_map) override; Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType 
query_type, uint32_t* count) override { + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) override { return Status::Error( "StringTypeInvertedIndexReader not support try_query"); } @@ -350,8 +345,9 @@ class BkdIndexReader : public InvertedIndexReader { const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr& bit_map) override; Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) override; + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) override; Status invoke_bkd_try_query(const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr r, uint32_t* count); Status invoke_bkd_query(const void* query_value, InvertedIndexQueryType query_type, @@ -364,7 +360,7 @@ class BkdIndexReader : public InvertedIndexReader { InvertedIndexReaderType type() override; Status get_bkd_reader(BKDIndexSearcherPtr& reader, const io::IOContext* io_ctx, - OlapReaderStatistics* stats); + OlapReaderStatistics* stats, RuntimeState* runtime_state); private: const TypeInfo* _type_info {}; diff --git a/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp b/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp index 9ee82bacdd73d2..4995e779892646 100644 --- a/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp +++ b/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp @@ -69,15 +69,17 @@ Status OrdinalIndexWriter::finish(io::FileWriter* file_writer, ColumnIndexMetaPB return Status::OK(); } -Status OrdinalIndexReader::load(bool use_page_cache, bool kept_in_memory) { +Status OrdinalIndexReader::load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats) { // TODO yyq: implement a new once flag to avoid status construct. 
- return _load_once.call([this, use_page_cache, kept_in_memory] { - return _load(use_page_cache, kept_in_memory, std::move(_meta_pb)); + return _load_once.call([this, use_page_cache, kept_in_memory, index_load_stats] { + return _load(use_page_cache, kept_in_memory, std::move(_meta_pb), index_load_stats); }); } Status OrdinalIndexReader::_load(bool use_page_cache, bool kept_in_memory, - std::unique_ptr index_meta) { + std::unique_ptr index_meta, + OlapReaderStatistics* stats) { if (index_meta->root_page().is_root_data_page()) { // only one data page, no index page _num_pages = 1; @@ -88,6 +90,7 @@ Status OrdinalIndexReader::_load(bool use_page_cache, bool kept_in_memory, } // need to read index page OlapReaderStatistics tmp_stats; + OlapReaderStatistics* stats_ptr = stats != nullptr ? stats : &tmp_stats; PageReadOptions opts { .use_page_cache = use_page_cache, .kept_in_memory = kept_in_memory, @@ -96,8 +99,9 @@ Status OrdinalIndexReader::_load(bool use_page_cache, bool kept_in_memory, .page_pointer = PagePointer(index_meta->root_page().root_page()), // ordinal index page uses NO_COMPRESSION right now .codec = nullptr, - .stats = &tmp_stats, - .io_ctx = io::IOContext {.is_index_data = true}, + .stats = stats_ptr, + .io_ctx = io::IOContext {.is_index_data = true, + .file_cache_stats = &stats_ptr->file_cache_stats}, }; // read index page diff --git a/be/src/olap/rowset/segment_v2/ordinal_page_index.h b/be/src/olap/rowset/segment_v2/ordinal_page_index.h index 1d74cf989520aa..df60edb12d1481 100644 --- a/be/src/olap/rowset/segment_v2/ordinal_page_index.h +++ b/be/src/olap/rowset/segment_v2/ordinal_page_index.h @@ -75,7 +75,7 @@ class OrdinalIndexReader : public MetadataAdder { virtual ~OrdinalIndexReader(); // load and parse the index page into memory - Status load(bool use_page_cache, bool kept_in_memory); + Status load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats); // the returned iter points to the largest element which is less than 
`ordinal`, // or points to the first element if all elements are greater than `ordinal`, @@ -94,7 +94,8 @@ class OrdinalIndexReader : public MetadataAdder { private: Status _load(bool use_page_cache, bool kept_in_memory, - std::unique_ptr index_meta); + std::unique_ptr index_meta, + OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; diff --git a/be/src/olap/rowset/segment_v2/segment.cpp b/be/src/olap/rowset/segment_v2/segment.cpp index d55d84901c2e66..b5ab3f0e873549 100644 --- a/be/src/olap/rowset/segment_v2/segment.cpp +++ b/be/src/olap/rowset/segment_v2/segment.cpp @@ -290,7 +290,7 @@ Status Segment::new_iterator(SchemaSPtr schema, const StorageReadOptions& read_o { SCOPED_RAW_TIMER(&read_options.stats->segment_load_index_timer_ns); - RETURN_IF_ERROR(load_index()); + RETURN_IF_ERROR(load_index(read_options.stats)); } if (read_options.delete_condition_predicates->num_of_column_predicate() == 0 && @@ -475,7 +475,7 @@ Status Segment::_parse_footer(SegmentFooterPB* footer) { return Status::OK(); } -Status Segment::_load_pk_bloom_filter() { +Status Segment::_load_pk_bloom_filter(OlapReaderStatistics* stats) { #ifdef BE_TEST if (_pk_index_meta == nullptr) { // for BE UT "segment_cache_test" @@ -490,30 +490,30 @@ Status Segment::_load_pk_bloom_filter() { DCHECK(_pk_index_meta != nullptr); DCHECK(_pk_index_reader != nullptr); - return _load_pk_bf_once.call([this] { - RETURN_IF_ERROR(_pk_index_reader->parse_bf(_file_reader, *_pk_index_meta)); + return _load_pk_bf_once.call([this, stats] { + RETURN_IF_ERROR(_pk_index_reader->parse_bf(_file_reader, *_pk_index_meta, stats)); // _meta_mem_usage += _pk_index_reader->get_bf_memory_size(); return Status::OK(); }); } Status Segment::load_pk_index_and_bf(OlapReaderStatistics* index_load_stats) { - _pk_index_load_stats = index_load_stats; - RETURN_IF_ERROR(load_index()); - RETURN_IF_ERROR(_load_pk_bloom_filter()); + RETURN_IF_ERROR(load_index(index_load_stats)); + 
RETURN_IF_ERROR(_load_pk_bloom_filter(index_load_stats)); return Status::OK(); } -Status Segment::load_index() { - return _load_index_once.call([this] { +Status Segment::load_index(OlapReaderStatistics* stats) { + return _load_index_once.call([this, stats] { if (_tablet_schema->keys_type() == UNIQUE_KEYS && _pk_index_meta != nullptr) { - _pk_index_reader = std::make_unique(_pk_index_load_stats); - RETURN_IF_ERROR(_pk_index_reader->parse_index(_file_reader, *_pk_index_meta)); + _pk_index_reader = std::make_unique(); + RETURN_IF_ERROR(_pk_index_reader->parse_index(_file_reader, *_pk_index_meta, stats)); // _meta_mem_usage += _pk_index_reader->get_memory_size(); return Status::OK(); } else { // read and parse short key index page OlapReaderStatistics tmp_stats; + OlapReaderStatistics* stats_ptr = stats != nullptr ? stats : &tmp_stats; PageReadOptions opts { .use_page_cache = true, .type = INDEX_PAGE, @@ -522,7 +522,8 @@ Status Segment::load_index() { // short key index page uses NO_COMPRESSION for now .codec = nullptr, .stats = &tmp_stats, - .io_ctx = io::IOContext {.is_index_data = true}, + .io_ctx = io::IOContext {.is_index_data = true, + .file_cache_stats = &stats_ptr->file_cache_stats}, }; Slice body; PageFooterPB footer; @@ -970,8 +971,8 @@ Status Segment::new_inverted_index_iterator(const TabletColumn& tablet_column, Status Segment::lookup_row_key(const Slice& key, const TabletSchema* latest_schema, bool with_seq_col, bool with_rowid, RowLocation* row_location, - std::string* encoded_seq_value, OlapReaderStatistics* stats) { - RETURN_IF_ERROR(load_pk_index_and_bf()); + OlapReaderStatistics* stats, std::string* encoded_seq_value) { + RETURN_IF_ERROR(load_pk_index_and_bf(stats)); bool has_seq_col = latest_schema->has_sequence_col(); bool has_rowid = !latest_schema->cluster_key_uids().empty(); size_t seq_col_length = 0; @@ -1071,9 +1072,10 @@ Status Segment::lookup_row_key(const Slice& key, const TabletSchema* latest_sche } Status 
Segment::read_key_by_rowid(uint32_t row_id, std::string* key) { - RETURN_IF_ERROR(load_pk_index_and_bf()); + OlapReaderStatistics* null_stat = nullptr; + RETURN_IF_ERROR(load_pk_index_and_bf(null_stat)); std::unique_ptr iter; - RETURN_IF_ERROR(_pk_index_reader->new_iterator(&iter)); + RETURN_IF_ERROR(_pk_index_reader->new_iterator(&iter, null_stat)); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( _pk_index_reader->type_info()->type(), 1, 0); @@ -1129,7 +1131,8 @@ Status Segment::seek_and_read_by_rowid(const TabletSchema& schema, SlotDescripto .use_page_cache = !config::disable_storage_page_cache, .file_reader = file_reader().get(), .stats = &stats, - .io_ctx = io::IOContext {.reader_type = ReaderType::READER_QUERY}, + .io_ctx = io::IOContext {.reader_type = ReaderType::READER_QUERY, + .file_cache_stats = &stats.file_cache_stats}, }; std::vector single_row_loc {row_id}; if (!slot->column_paths().empty()) { diff --git a/be/src/olap/rowset/segment_v2/segment.h b/be/src/olap/rowset/segment_v2/segment.h index ca2fee0e77aa82..441ae3e85e9b3f 100644 --- a/be/src/olap/rowset/segment_v2/segment.h +++ b/be/src/olap/rowset/segment_v2/segment.h @@ -134,9 +134,8 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd } Status lookup_row_key(const Slice& key, const TabletSchema* latest_schema, bool with_seq_col, - bool with_rowid, RowLocation* row_location, - std::string* encoded_seq_value = nullptr, - OlapReaderStatistics* stats = nullptr); + bool with_rowid, RowLocation* row_location, OlapReaderStatistics* stats, + std::string* encoded_seq_value = nullptr); Status read_key_by_rowid(uint32_t row_id, std::string* key); @@ -144,9 +143,9 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd vectorized::MutableColumnPtr& result, OlapReaderStatistics& stats, std::unique_ptr& iterator_hint); - Status load_index(); + Status load_index(OlapReaderStatistics* stats); - Status load_pk_index_and_bf(OlapReaderStatistics* 
index_load_stats = nullptr); + Status load_pk_index_and_bf(OlapReaderStatistics* stats); void update_healthy_status(Status new_status) { _healthy_status.update(new_status); } // The segment is loaded into SegmentCache and then will load indices, if there are something wrong @@ -227,7 +226,7 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd Status _open(); Status _parse_footer(SegmentFooterPB* footer); Status _create_column_readers(const SegmentFooterPB& footer); - Status _load_pk_bloom_filter(); + Status _load_pk_bloom_filter(OlapReaderStatistics* stats); ColumnReader* _get_column_reader(const TabletColumn& col); // Get Iterator which will read variant root column and extract with paths and types info @@ -305,7 +304,6 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd InvertedIndexFileInfo _idx_file_info; int _be_exec_version = BeExecVersionManager::get_newest_version(); - OlapReaderStatistics* _pk_index_load_stats = nullptr; }; } // namespace segment_v2 diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.cpp b/be/src/olap/rowset/segment_v2/segment_iterator.cpp index 0c54eaa2d6cbaa..ec0f9104e050e7 100644 --- a/be/src/olap/rowset/segment_v2/segment_iterator.cpp +++ b/be/src/olap/rowset/segment_v2/segment_iterator.cpp @@ -839,7 +839,13 @@ bool SegmentIterator::_downgrade_without_index(Status res, bool need_remaining) // such as when index segment files are not generated // above case can downgrade without index query _opts.stats->inverted_index_downgrade_count++; - LOG(INFO) << "will downgrade without index to evaluate predicate, because of res: " << res; + if (!res.is()) { + LOG(INFO) << "will downgrade without index to evaluate predicate, because of res: " + << res; + } else { + VLOG_DEBUG << "will downgrade without index to evaluate predicate, because of res: " + << res; + } return true; } return false; @@ -1181,7 +1187,7 @@ Status SegmentIterator::_lookup_ordinal_from_pk_index(const RowCursor& key, bool 
bool exact_match = false; std::unique_ptr index_iterator; - RETURN_IF_ERROR(pk_index_reader->new_iterator(&index_iterator)); + RETURN_IF_ERROR(pk_index_reader->new_iterator(&index_iterator, _opts.stats)); Status status = index_iterator->seek_at_or_after(&index_key, &exact_match); if (UNLIKELY(!status.ok())) { @@ -1955,8 +1961,7 @@ Status SegmentIterator::next_batch(vectorized::Block* block) { Status SegmentIterator::_convert_to_expected_type(const std::vector& col_ids) { for (ColumnId i : col_ids) { - if (_current_return_columns[i] == nullptr || _converted_column_ids[i] || - _is_pred_column[i]) { + if (!_current_return_columns[i] || _converted_column_ids[i] || _is_pred_column[i]) { continue; } if (!_segment->same_with_storage_type( @@ -1999,7 +2004,7 @@ Status SegmentIterator::copy_column_data_by_selector(vectorized::IColumn* input_ return Status::RuntimeError("copy_column_data_by_selector nullable mismatch"); } - return input_col_ptr->filter_by_selector(sel_rowid_idx, select_size, output_col); + return input_col_ptr->filter_by_selector(sel_rowid_idx, select_size, output_col.get()); } void SegmentIterator::_clear_iterators() { diff --git a/be/src/olap/rowset/segment_v2/zone_map_index.cpp b/be/src/olap/rowset/segment_v2/zone_map_index.cpp index c2139ff0899090..9249c82aedfdc3 100644 --- a/be/src/olap/rowset/segment_v2/zone_map_index.cpp +++ b/be/src/olap/rowset/segment_v2/zone_map_index.cpp @@ -140,18 +140,21 @@ Status TypedZoneMapIndexWriter::finish(io::FileWriter* file_writer, return writer.finish(meta->mutable_page_zone_maps()); } -Status ZoneMapIndexReader::load(bool use_page_cache, bool kept_in_memory) { +Status ZoneMapIndexReader::load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats) { // TODO yyq: implement a new once flag to avoid status construct. 
- return _load_once.call([this, use_page_cache, kept_in_memory] { - return _load(use_page_cache, kept_in_memory, std::move(_page_zone_maps_meta)); + return _load_once.call([this, use_page_cache, kept_in_memory, index_load_stats] { + return _load(use_page_cache, kept_in_memory, std::move(_page_zone_maps_meta), + index_load_stats); }); } Status ZoneMapIndexReader::_load(bool use_page_cache, bool kept_in_memory, - std::unique_ptr page_zone_maps_meta) { + std::unique_ptr page_zone_maps_meta, + OlapReaderStatistics* index_load_stats) { IndexedColumnReader reader(_file_reader, *page_zone_maps_meta); - RETURN_IF_ERROR(reader.load(use_page_cache, kept_in_memory)); - IndexedColumnIterator iter(&reader); + RETURN_IF_ERROR(reader.load(use_page_cache, kept_in_memory, index_load_stats)); + IndexedColumnIterator iter(&reader, index_load_stats); _page_zone_maps.resize(reader.num_values()); diff --git a/be/src/olap/rowset/segment_v2/zone_map_index.h b/be/src/olap/rowset/segment_v2/zone_map_index.h index 34869bbbfeea62..04cae12975c5fa 100644 --- a/be/src/olap/rowset/segment_v2/zone_map_index.h +++ b/be/src/olap/rowset/segment_v2/zone_map_index.h @@ -154,14 +154,16 @@ class ZoneMapIndexReader : public MetadataAdder { virtual ~ZoneMapIndexReader(); // load all page zone maps into memory - Status load(bool use_page_cache, bool kept_in_memory); + Status load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats = nullptr); const std::vector& page_zone_maps() const { return _page_zone_maps; } int32_t num_pages() const { return _page_zone_maps.size(); } private: - Status _load(bool use_page_cache, bool kept_in_memory, std::unique_ptr); + Status _load(bool use_page_cache, bool kept_in_memory, std::unique_ptr, + OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; diff --git a/be/src/olap/schema_change.cpp b/be/src/olap/schema_change.cpp index 7f947612eed4ac..658ff05b67f0d6 100644 --- a/be/src/olap/schema_change.cpp +++ 
b/be/src/olap/schema_change.cpp @@ -337,7 +337,7 @@ Status BlockChanger::change_block(vectorized::Block* ref_block, int result_tmp_column_idx = -1; RETURN_IF_ERROR(ctx->execute(ref_block, &result_tmp_column_idx)); auto& result_tmp_column_def = ref_block->get_by_position(result_tmp_column_idx); - if (result_tmp_column_def.column == nullptr) { + if (!result_tmp_column_def.column) { return Status::Error( "result column={} is nullptr, input expr={}", result_tmp_column_def.name, apache::thrift::ThriftDebugString(*expr)); @@ -430,7 +430,7 @@ Status BlockChanger::_check_cast_valid(vectorized::ColumnPtr input_column, if (input_column->is_nullable() != output_column->is_nullable()) { if (input_column->is_nullable()) { const auto* ref_null_map = - vectorized::check_and_get_column(input_column) + vectorized::check_and_get_column(input_column.get()) ->get_null_map_column() .get_data() .data(); @@ -446,10 +446,12 @@ Status BlockChanger::_check_cast_valid(vectorized::ColumnPtr input_column, } } else { const auto& null_map_column = - vectorized::check_and_get_column(output_column) + vectorized::check_and_get_column( + output_column.get()) ->get_null_map_column(); const auto& nested_column = - vectorized::check_and_get_column(output_column) + vectorized::check_and_get_column( + output_column.get()) ->get_nested_column(); const auto* new_null_map = null_map_column.get_data().data(); @@ -481,12 +483,12 @@ Status BlockChanger::_check_cast_valid(vectorized::ColumnPtr input_column, if (input_column->is_nullable() && output_column->is_nullable()) { const auto* ref_null_map = - vectorized::check_and_get_column(input_column) + vectorized::check_and_get_column(input_column.get()) ->get_null_map_column() .get_data() .data(); const auto* new_null_map = - vectorized::check_and_get_column(output_column) + vectorized::check_and_get_column(output_column.get()) ->get_null_map_column() .get_data() .data(); diff --git a/be/src/olap/tablet.cpp b/be/src/olap/tablet.cpp index 
c7919b3f8dca24..1758166e76edee 100644 --- a/be/src/olap/tablet.cpp +++ b/be/src/olap/tablet.cpp @@ -2766,7 +2766,7 @@ void Tablet::check_table_size_correctness() { const std::vector& all_rs_metas = _tablet_meta->all_rs_metas(); for (const auto& rs_meta : all_rs_metas) { int64_t total_segment_size = get_segment_file_size(rs_meta); - int64_t total_inverted_index_size = get_inverted_index_file_szie(rs_meta); + int64_t total_inverted_index_size = get_inverted_index_file_size(rs_meta); if (rs_meta->data_disk_size() != total_segment_size || rs_meta->index_disk_size() != total_inverted_index_size || rs_meta->data_disk_size() + rs_meta->index_disk_size() != rs_meta->total_disk_size()) { @@ -2817,7 +2817,7 @@ int64_t Tablet::get_segment_file_size(const RowsetMetaSharedPtr& rs_meta) { return total_segment_size; } -int64_t Tablet::get_inverted_index_file_szie(const RowsetMetaSharedPtr& rs_meta) { +int64_t Tablet::get_inverted_index_file_size(const RowsetMetaSharedPtr& rs_meta) { const auto& fs = rs_meta->fs(); if (!fs) { LOG(WARNING) << "get fs failed, resource_id={}" << rs_meta->resource_id(); diff --git a/be/src/olap/tablet.h b/be/src/olap/tablet.h index d00476f044191c..afe043bf15195b 100644 --- a/be/src/olap/tablet.h +++ b/be/src/olap/tablet.h @@ -214,6 +214,7 @@ class Tablet final : public BaseTablet { std::mutex& get_push_lock() { return _ingest_lock; } std::mutex& get_base_compaction_lock() { return _base_compaction_lock; } std::mutex& get_cumulative_compaction_lock() { return _cumulative_compaction_lock; } + std::shared_mutex& get_meta_store_lock() { return _meta_store_lock; } std::shared_timed_mutex& get_migration_lock() { return _migration_lock; } @@ -531,7 +532,7 @@ class Tablet final : public BaseTablet { void check_table_size_correctness(); std::string get_segment_path(const RowsetMetaSharedPtr& rs_meta, int64_t seg_id); int64_t get_segment_file_size(const RowsetMetaSharedPtr& rs_meta); - int64_t get_inverted_index_file_szie(const RowsetMetaSharedPtr& rs_meta); + 
int64_t get_inverted_index_file_size(const RowsetMetaSharedPtr& rs_meta); public: static const int64_t K_INVALID_CUMULATIVE_POINT = -1; @@ -588,7 +589,7 @@ class Tablet final : public BaseTablet { std::shared_ptr _cumulative_compaction_policy; std::string_view _cumulative_compaction_type; - // use a seperate thread to check all tablets paths existance + // use a separate thread to check all tablets paths existence std::atomic _is_tablet_path_exists; int64_t _last_missed_version; diff --git a/be/src/olap/task/engine_clone_task.cpp b/be/src/olap/task/engine_clone_task.cpp index fa8d9b8248e3f4..9af3e078d3aefa 100644 --- a/be/src/olap/task/engine_clone_task.cpp +++ b/be/src/olap/task/engine_clone_task.cpp @@ -171,6 +171,16 @@ Status EngineCloneTask::_do_clone() { auto duration = std::chrono::milliseconds(dp->param("duration", 10 * 1000)); std::this_thread::sleep_for(duration); }); + + DBUG_EXECUTE_IF("EngineCloneTask.failed_clone", { + LOG_WARNING("EngineCloneTask.failed_clone") + .tag("tablet_id", _clone_req.tablet_id) + .tag("replica_id", _clone_req.replica_id) + .tag("version", _clone_req.version); + return Status::InternalError( + "in debug point, EngineCloneTask.failed_clone tablet={}, replica={}, version={}", + _clone_req.tablet_id, _clone_req.replica_id, _clone_req.version); + }); Status status = Status::OK(); string src_file_path; TBackend src_host; diff --git a/be/src/pipeline/exec/hashjoin_build_sink.cpp b/be/src/pipeline/exec/hashjoin_build_sink.cpp index e5d6a952d47992..6aca48973678d9 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.cpp +++ b/be/src/pipeline/exec/hashjoin_build_sink.cpp @@ -292,7 +292,7 @@ Status HashJoinBuildSinkLocalState::_extract_join_column( // update nulllmap and split nested out of ColumnNullable when serialize_null_into_key is false and column is nullable const auto& col_nested = nullable->get_nested_column(); const auto& col_nullmap = nullable->get_null_map_data(); - DCHECK(null_map != nullptr); + DCHECK(null_map); 
vectorized::VectorizedUtils::update_null_map(null_map->get_data(), col_nullmap); raw_ptrs[i] = &col_nested; } else { diff --git a/be/src/pipeline/exec/hashjoin_probe_operator.cpp b/be/src/pipeline/exec/hashjoin_probe_operator.cpp index 0db525f1bf5222..37ccd6206f3e0f 100644 --- a/be/src/pipeline/exec/hashjoin_probe_operator.cpp +++ b/be/src/pipeline/exec/hashjoin_probe_operator.cpp @@ -371,7 +371,7 @@ Status HashJoinProbeLocalState::_extract_join_column(vectorized::Block& block, _need_null_map_for_probe = _need_probe_null_map(block, res_col_ids); } if (_need_null_map_for_probe) { - if (_null_map_column == nullptr) { + if (!_null_map_column) { _null_map_column = vectorized::ColumnUInt8::create(); } _null_map_column->get_data().assign(block.rows(), (uint8_t)0); @@ -389,7 +389,7 @@ Status HashJoinProbeLocalState::_extract_join_column(vectorized::Block& block, // update nulllmap and split nested out of ColumnNullable when serialize_null_into_key is false and column is nullable const auto& col_nested = nullable->get_nested_column(); const auto& col_nullmap = nullable->get_null_map_data(); - DCHECK(_null_map_column != nullptr); + DCHECK(_null_map_column); vectorized::VectorizedUtils::update_null_map(_null_map_column->get_data(), col_nullmap); _probe_columns[i] = &col_nested; } else { diff --git a/be/src/pipeline/exec/join_probe_operator.cpp b/be/src/pipeline/exec/join_probe_operator.cpp index 11b5b29c8b556b..9a50d76a48ce8c 100644 --- a/be/src/pipeline/exec/join_probe_operator.cpp +++ b/be/src/pipeline/exec/join_probe_operator.cpp @@ -150,7 +150,7 @@ Status JoinProbeLocalState::_build_output_block( /// TODO: maybe need a method to check if a column need to be converted to full /// column. 
if (is_column_const(*origin_column) || - check_column(origin_column)) { + check_column(origin_column.get())) { auto column_ptr = origin_column->convert_to_full_column_if_const(); insert_column_datas(mutable_columns[i], column_ptr, rows); } else { diff --git a/be/src/pipeline/exec/olap_scan_operator.cpp b/be/src/pipeline/exec/olap_scan_operator.cpp index fa91caffa8ebc4..b1ab62743323c6 100644 --- a/be/src/pipeline/exec/olap_scan_operator.cpp +++ b/be/src/pipeline/exec/olap_scan_operator.cpp @@ -246,9 +246,8 @@ Status OlapScanLocalState::_should_push_down_function_filter(vectorized::Vectori DCHECK(children[1 - i]->type().is_string_type()); std::shared_ptr const_col_wrapper; RETURN_IF_ERROR(children[1 - i]->get_const_col(expr_ctx, &const_col_wrapper)); - if (const vectorized::ColumnConst* const_column = - check_and_get_column( - const_col_wrapper->column_ptr)) { + if (const auto* const_column = check_and_get_column( + const_col_wrapper->column_ptr.get())) { *constant_str = const_column->get_data_at(0); } else { pdt = PushDownType::UNACCEPTABLE; diff --git a/be/src/pipeline/exec/scan_operator.cpp b/be/src/pipeline/exec/scan_operator.cpp index ae4396b22c7eec..a73e1a6db7ccb7 100644 --- a/be/src/pipeline/exec/scan_operator.cpp +++ b/be/src/pipeline/exec/scan_operator.cpp @@ -520,8 +520,8 @@ Status ScanLocalState::_eval_const_conjuncts(vectorized::VExpr* vexpr, if (vexpr->is_constant()) { std::shared_ptr const_col_wrapper; RETURN_IF_ERROR(vexpr->get_const_col(expr_ctx, &const_col_wrapper)); - if (const auto* const_column = - check_and_get_column(const_col_wrapper->column_ptr)) { + if (const auto* const_column = check_and_get_column( + const_col_wrapper->column_ptr.get())) { constant_val = const_cast(const_column->get_data_at(0).data); if (constant_val == nullptr || !*reinterpret_cast(constant_val)) { *pdt = PushDownType::ACCEPTABLE; @@ -530,7 +530,7 @@ Status ScanLocalState::_eval_const_conjuncts(vectorized::VExpr* vexpr, } } else if (const auto* bool_column = 
check_and_get_column>( - const_col_wrapper->column_ptr)) { + const_col_wrapper->column_ptr.get())) { // TODO: If `vexpr->is_constant()` is true, a const column is expected here. // But now we still don't cover all predicates for const expression. // For example, for query `SELECT col FROM tbl WHERE 'PROMOTION' LIKE 'AAA%'`, @@ -690,7 +690,7 @@ Status ScanLocalState::_should_push_down_binary_predicate( std::shared_ptr const_col_wrapper; RETURN_IF_ERROR(children[1 - i]->get_const_col(expr_ctx, &const_col_wrapper)); if (const auto* const_column = check_and_get_column( - const_col_wrapper->column_ptr)) { + const_col_wrapper->column_ptr.get())) { *slot_ref_child = i; *constant_val = const_column->get_data_at(0); } else { diff --git a/be/src/pipeline/local_exchange/local_exchanger.h b/be/src/pipeline/local_exchange/local_exchanger.h index d6871b2ba97cc3..2ab1c8627228a4 100644 --- a/be/src/pipeline/local_exchange/local_exchanger.h +++ b/be/src/pipeline/local_exchange/local_exchanger.h @@ -124,12 +124,13 @@ template struct BlockQueue { std::atomic eos = false; moodycamel::ConcurrentQueue data_queue; + moodycamel::ProducerToken ptok {data_queue}; BlockQueue() : eos(false), data_queue(moodycamel::ConcurrentQueue()) {} BlockQueue(BlockQueue&& other) : eos(other.eos.load()), data_queue(std::move(other.data_queue)) {} inline bool enqueue(BlockType const& item) { if (!eos) { - if (!data_queue.enqueue(item)) [[unlikely]] { + if (!data_queue.enqueue(ptok, item)) [[unlikely]] { throw Exception(ErrorCode::INTERNAL_ERROR, "Exception occurs in data queue [size = {}] of local exchange.", data_queue.size_approx()); @@ -141,7 +142,7 @@ struct BlockQueue { inline bool enqueue(BlockType&& item) { if (!eos) { - if (!data_queue.enqueue(std::move(item))) [[unlikely]] { + if (!data_queue.enqueue(ptok, std::move(item))) [[unlikely]] { throw Exception(ErrorCode::INTERNAL_ERROR, "Exception occurs in data queue [size = {}] of local exchange.", data_queue.size_approx()); diff --git 
a/be/src/runtime/snapshot_loader.cpp b/be/src/runtime/snapshot_loader.cpp index b492a929fca3bf..c5b27c823054a4 100644 --- a/be/src/runtime/snapshot_loader.cpp +++ b/be/src/runtime/snapshot_loader.cpp @@ -765,49 +765,68 @@ Status SnapshotLoader::move(const std::string& snapshot_path, TabletSharedPtr ta return Status::InternalError(err_msg); } - if (overwrite) { - std::vector snapshot_files; - RETURN_IF_ERROR(_get_existing_files_from_local(snapshot_path, &snapshot_files)); - - // 1. simply delete the old dir and replace it with the snapshot dir - try { - // This remove seems soft enough, because we already get - // tablet id and schema hash from this path, which - // means this path is a valid path. - std::filesystem::remove_all(tablet_path); - VLOG_CRITICAL << "remove dir: " << tablet_path; - std::filesystem::create_directory(tablet_path); - VLOG_CRITICAL << "re-create dir: " << tablet_path; - } catch (const std::filesystem::filesystem_error& e) { - std::stringstream ss; - ss << "failed to move tablet path: " << tablet_path << ". err: " << e.what(); - LOG(WARNING) << ss.str(); - return Status::InternalError(ss.str()); - } + if (!overwrite) { + throw Exception(Status::FatalError("only support overwrite now")); + } - // link files one by one - // files in snapshot dir will be moved in snapshot clean process - std::vector linked_files; - for (auto& file : snapshot_files) { - auto full_src_path = fmt::format("{}/{}", snapshot_path, file); - auto full_dest_path = fmt::format("{}/{}", tablet_path, file); - if (link(full_src_path.c_str(), full_dest_path.c_str()) != 0) { - LOG(WARNING) << "failed to link file from " << full_src_path << " to " - << full_dest_path << ", err: " << std::strerror(errno); - - // clean the already linked files - for (auto& linked_file : linked_files) { - remove(linked_file.c_str()); - } + // Medium migration/clone/checkpoint/compaction may change or check the + // files and tablet meta, so we need to take these locks. 
+ std::unique_lock migration_lock(tablet->get_migration_lock(), std::try_to_lock); + std::unique_lock base_compact_lock(tablet->get_base_compaction_lock(), std::try_to_lock); + std::unique_lock cumu_compact_lock(tablet->get_cumulative_compaction_lock(), std::try_to_lock); + std::unique_lock cold_compact_lock(tablet->get_cold_compaction_lock(), std::try_to_lock); + std::unique_lock build_idx_lock(tablet->get_build_inverted_index_lock(), std::try_to_lock); + std::unique_lock meta_store_lock(tablet->get_meta_store_lock(), std::try_to_lock); + if (!migration_lock.owns_lock() || !base_compact_lock.owns_lock() || + !cumu_compact_lock.owns_lock() || !cold_compact_lock.owns_lock() || + !build_idx_lock.owns_lock() || !meta_store_lock.owns_lock()) { + // This error should be retryable + auto status = Status::ObtainLockFailed("failed to get tablet locks, tablet: {}", tablet_id); + LOG(WARNING) << status << ", snapshot path: " << snapshot_path + << ", tablet path: " << tablet_path; + return status; + } - return Status::InternalError("move tablet failed") + std::vector snapshot_files; + RETURN_IF_ERROR(_get_existing_files_from_local(snapshot_path, &snapshot_files)); + + // FIXME: the logic below will damage the tablet files if it fails in the middle. + + // 1. simply delete the old dir and replace it with the snapshot dir + try { + // This remove seems soft enough, because we already get + // tablet id and schema hash from this path, which + // means this path is a valid path. + std::filesystem::remove_all(tablet_path); + VLOG_CRITICAL << "remove dir: " << tablet_path; + std::filesystem::create_directory(tablet_path); + VLOG_CRITICAL << "re-create dir: " << tablet_path; + } catch (const std::filesystem::filesystem_error& e) { + std::stringstream ss; + ss << "failed to move tablet path: " << tablet_path << ". 
err: " << e.what(); + LOG(WARNING) << ss.str(); + return Status::InternalError(ss.str()); + } + + // link files one by one + // files in snapshot dir will be moved in snapshot clean process + std::vector linked_files; + for (auto& file : snapshot_files) { + auto full_src_path = fmt::format("{}/{}", snapshot_path, file); + auto full_dest_path = fmt::format("{}/{}", tablet_path, file); + if (link(full_src_path.c_str(), full_dest_path.c_str()) != 0) { + LOG(WARNING) << "failed to link file from " << full_src_path << " to " << full_dest_path + << ", err: " << std::strerror(errno); + + // clean the already linked files + for (auto& linked_file : linked_files) { + remove(linked_file.c_str()); } - linked_files.push_back(full_dest_path); - VLOG_CRITICAL << "link file from " << full_src_path << " to " << full_dest_path; - } - } else { - throw Exception(Status::FatalError("only support overwrite now")); + return Status::InternalError("move tablet failed"); + } + linked_files.push_back(full_dest_path); + VLOG_CRITICAL << "link file from " << full_src_path << " to " << full_dest_path; } // snapshot loader not need to change tablet uid diff --git a/be/src/runtime/workload_group/workload_group_metrics.cpp b/be/src/runtime/workload_group/workload_group_metrics.cpp index 18ff7aa2f4f185..0f7322b7feb448 100644 --- a/be/src/runtime/workload_group/workload_group_metrics.cpp +++ b/be/src/runtime/workload_group/workload_group_metrics.cpp @@ -36,32 +36,31 @@ WorkloadGroupMetrics::WorkloadGroupMetrics(WorkloadGroup* wg) { _cpu_time_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::SECONDS, "workload_group_cpu_time_sec"); - _cpu_time_counter = - (IntAtomicCounter*)(_entity->register_metric(_cpu_time_metric.get())); + _cpu_time_counter = (IntCounter*)(_entity->register_metric(_cpu_time_metric.get())); _mem_used_bytes_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_mem_used_bytes"); - _mem_used_bytes_counter = 
(IntAtomicCounter*)(_entity->register_metric( - _mem_used_bytes_metric.get())); + _mem_used_bytes_counter = + (IntCounter*)(_entity->register_metric(_mem_used_bytes_metric.get())); _local_scan_bytes_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_local_scan_bytes"); - _local_scan_bytes_counter = (IntAtomicCounter*)(_entity->register_metric( - _local_scan_bytes_metric.get())); + _local_scan_bytes_counter = + (IntCounter*)(_entity->register_metric(_local_scan_bytes_metric.get())); _remote_scan_bytes_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_remote_scan_bytes"); - _remote_scan_bytes_counter = (IntAtomicCounter*)(_entity->register_metric( - _remote_scan_bytes_metric.get())); + _remote_scan_bytes_counter = + (IntCounter*)(_entity->register_metric(_remote_scan_bytes_metric.get())); for (const auto& [key, io_throttle] : wg->_scan_io_throttle_map) { std::unique_ptr metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_local_scan_bytes_" + io_throttle->metric_name()); _local_scan_bytes_counter_map[key] = - (IntAtomicCounter*)(_entity->register_metric(metric.get())); + (IntCounter*)(_entity->register_metric(metric.get())); _local_scan_bytes_metric_map[key] = std::move(metric); } } diff --git a/be/src/runtime/workload_group/workload_group_metrics.h b/be/src/runtime/workload_group/workload_group_metrics.h index e68715df249dee..c761638d115439 100644 --- a/be/src/runtime/workload_group/workload_group_metrics.h +++ b/be/src/runtime/workload_group/workload_group_metrics.h @@ -28,7 +28,7 @@ class WorkloadGroup; template class AtomicCounter; -using IntAtomicCounter = AtomicCounter; +using IntCounter = AtomicCounter; class MetricEntity; struct MetricPrototype; @@ -65,11 +65,11 @@ class WorkloadGroupMetrics { // _local_disk_io_metric is every disk's IO std::map> _local_scan_bytes_metric_map; - IntAtomicCounter* _cpu_time_counter {nullptr}; 
// used for metric - IntAtomicCounter* _mem_used_bytes_counter {nullptr}; // used for metric - IntAtomicCounter* _local_scan_bytes_counter {nullptr}; // used for metric - IntAtomicCounter* _remote_scan_bytes_counter {nullptr}; // used for metric - std::map _local_scan_bytes_counter_map; // used for metric + IntCounter* _cpu_time_counter {nullptr}; // used for metric + IntCounter* _mem_used_bytes_counter {nullptr}; // used for metric + IntCounter* _local_scan_bytes_counter {nullptr}; // used for metric + IntCounter* _remote_scan_bytes_counter {nullptr}; // used for metric + std::map _local_scan_bytes_counter_map; // used for metric std::atomic _cpu_time_nanos {0}; std::atomic _last_cpu_time_nanos {0}; diff --git a/be/src/util/core_local.cpp b/be/src/util/core_local.cpp deleted file mode 100644 index 1c4b1dd04715b4..00000000000000 --- a/be/src/util/core_local.cpp +++ /dev/null @@ -1,129 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -#include "util/core_local.h" - -#include -#include -#include -#include - -#include "common/compiler_util.h" // IWYU pragma: keep -#include "common/logging.h" -#include "util/spinlock.h" -#include "util/sse_util.hpp" - -namespace doris { - -constexpr int BLOCK_SIZE = 4096; -struct alignas(CACHE_LINE_SIZE) CoreDataBlock { - void* at(size_t offset) { return data + offset; } - char data[BLOCK_SIZE]; - - static void* operator new(size_t nbytes) { - void* p = nullptr; - if (posix_memalign(&p, alignof(CoreDataBlock), nbytes) == 0) { - return p; - } - throw std::bad_alloc(); - } - - static void operator delete(void* p) { free(p); } -}; - -template -class CoreDataAllocatorImpl : public CoreDataAllocator { -public: - virtual ~CoreDataAllocatorImpl(); - void* get_or_create(size_t id) override { - size_t block_id = id / ELEMENTS_PER_BLOCK; - { - std::lock_guard l(_lock); - if (block_id >= _blocks.size()) { - _blocks.resize(block_id + 1); - } - } - CoreDataBlock* block = _blocks[block_id]; - if (block == nullptr) { - std::lock_guard l(_lock); - block = _blocks[block_id]; - if (block == nullptr) { - block = new CoreDataBlock(); - _blocks[block_id] = block; - } - } - size_t offset = (id % ELEMENTS_PER_BLOCK) * ELEMENT_BYTES; - return block->at(offset); - } - -private: - static constexpr int ELEMENTS_PER_BLOCK = BLOCK_SIZE / ELEMENT_BYTES; - SpinLock _lock; // lock to protect the modification of _blocks - std::vector _blocks; -}; - -template -CoreDataAllocatorImpl::~CoreDataAllocatorImpl() { - for (auto block : _blocks) { - delete block; - } -} - -CoreDataAllocatorFactory* CoreDataAllocatorFactory::instance() { - static CoreDataAllocatorFactory _s_instance; - return &_s_instance; -} - -CoreDataAllocator* CoreDataAllocatorFactory::get_allocator(size_t cpu_idx, size_t data_bytes) { - std::lock_guard l(_lock); - auto pair = std::make_pair(cpu_idx, data_bytes); - auto it = _allocators.find(pair); - if (it != std::end(_allocators)) { - return it->second; - } - CoreDataAllocator* 
allocator = nullptr; - switch (data_bytes) { - case 1: - allocator = new CoreDataAllocatorImpl<1>(); - break; - case 2: - allocator = new CoreDataAllocatorImpl<2>(); - break; - case 3: - case 4: - allocator = new CoreDataAllocatorImpl<4>(); - break; - case 5: - case 6: - case 7: - case 8: - allocator = new CoreDataAllocatorImpl<8>(); - break; - default: - DCHECK(false) << "don't support core local value for this size, size=" << data_bytes; - } - _allocators.emplace(pair, allocator); - return allocator; -} - -CoreDataAllocatorFactory::~CoreDataAllocatorFactory() { - for (auto& it : _allocators) { - delete it.second; - } -} - -} // namespace doris diff --git a/be/src/util/core_local.h b/be/src/util/core_local.h deleted file mode 100644 index 1610ae5a0bb046..00000000000000 --- a/be/src/util/core_local.h +++ /dev/null @@ -1,162 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -#pragma once - -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -#include "common/compiler_util.h" // IWYU pragma: keep - -namespace doris { - -class CoreDataAllocator { -public: - virtual ~CoreDataAllocator() {} - virtual void* get_or_create(size_t id) = 0; -}; - -class CoreDataAllocatorFactory { -public: - CoreDataAllocatorFactory() {} - ~CoreDataAllocatorFactory(); - CoreDataAllocator* get_allocator(size_t cpu_id, size_t data_bytes); - static CoreDataAllocatorFactory* instance(); - -private: - DISALLOW_COPY_AND_ASSIGN(CoreDataAllocatorFactory); - -private: - std::mutex _lock; - std::map, CoreDataAllocator*> _allocators; -}; - -template -class CoreLocalValueController { -public: - CoreLocalValueController() { - int num_cpus = static_cast(std::thread::hardware_concurrency()); - _size = 8; - while (_size < num_cpus) { - _size <<= 1; - } - _allocators.resize(_size, nullptr); - for (int i = 0; i < _size; ++i) { - _allocators[i] = CoreDataAllocatorFactory::instance()->get_allocator(i, sizeof(T)); - } - } - - ~CoreLocalValueController() {} - - int get_id() { - std::lock_guard l(_lock); - int id = 0; - if (_free_ids.empty()) { - id = _next_id++; - } else { - id = _free_ids.back(); - _free_ids.pop_back(); - } - return id; - } - void reclaim_id(int id) { - std::lock_guard l(_lock); - _free_ids.push_back(id); - } - size_t size() const { return _size; } - CoreDataAllocator* allocator(int i) const { return _allocators[i]; } - - static CoreLocalValueController* instance() { - static CoreLocalValueController _s_instance; - return &_s_instance; - } - -private: - DISALLOW_COPY_AND_ASSIGN(CoreLocalValueController); - -private: - std::mutex _lock; - int _next_id = 0; - std::deque _free_ids; - std::vector _allocators; - size_t _size; -}; - -template -class CoreLocalValue { -public: - CoreLocalValue(const T init_value = T()) { - CoreLocalValueController* controller = CoreLocalValueController::instance(); - _id = 
controller->get_id(); - _size = controller->size(); - _values.resize(_size, nullptr); - for (int i = 0; i < _size; ++i) { - void* ptr = controller->allocator(i)->get_or_create(_id); - _values[i] = new (ptr) T(init_value); - } - } - - ~CoreLocalValue() { - for (int i = 0; i < _size; ++i) { - _values[i]->~T(); - } - CoreLocalValueController::instance()->reclaim_id(_id); - } - - size_t size() const { return _size; } - T* access() const { -#ifdef __APPLE__ - size_t cpu_id = 0; -#else - size_t cpu_id = sched_getcpu(); -#endif - if (cpu_id >= _size) { - cpu_id &= _size - 1; - } - return access_at_core(cpu_id); - } - T* access_at_core(size_t core_idx) const { return _values[core_idx]; } - - inline void reset() { - for (int i = 0; i < _size; ++i) { - _values[i]->~T(); - } - _values.clear(); - _values.resize(_size, nullptr); - CoreLocalValueController* controller = CoreLocalValueController::instance(); - for (int i = 0; i < _size; ++i) { - void* ptr = controller->allocator(i)->get_or_create(_id); - _values[i] = new (ptr) T(); - } - } - -private: - int _id = -1; - size_t _size = 0; - std::vector _values; -}; - -} // namespace doris diff --git a/be/src/util/datetype_cast.hpp b/be/src/util/datetype_cast.hpp index 495631ea7e376c..5c187ded7b729c 100644 --- a/be/src/util/datetype_cast.hpp +++ b/be/src/util/datetype_cast.hpp @@ -29,8 +29,10 @@ /* * We use these function family to clarify our types of datelike type. for example: * DataTypeDate -------------------> ColumnDate -----------------------> Int64 - * | TypeToColumn ValueTypeOfColumn - * | TypeToValueType + * | | TypeToColumn ValueTypeOfColumn | + * | ↘--------------------------------------------------------------↗ + * | ::FieldType + * ↓ TypeToValueType * VecDateTimeValue */ namespace doris::date_cast { @@ -102,6 +104,7 @@ constexpr bool IsV1() { std::is_same_v); } +// only for datelike types. 
template constexpr bool IsV2() { return !IsV1(); diff --git a/be/src/util/doris_metrics.cpp b/be/src/util/doris_metrics.cpp index e9d4f31e5ca137..e77ee1c36b6b89 100644 --- a/be/src/util/doris_metrics.cpp +++ b/be/src/util/doris_metrics.cpp @@ -311,17 +311,17 @@ DorisMetrics::DorisMetrics() : _metric_registry(_s_registry_name) { INT_GAUGE_METRIC_REGISTER(_server_metric_entity, broker_file_open_reading); INT_GAUGE_METRIC_REGISTER(_server_metric_entity, local_file_open_writing); INT_GAUGE_METRIC_REGISTER(_server_metric_entity, s3_file_open_writing); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_total); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_cache); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_remote); - - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, query_ctx_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_ctx_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_queued); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_running); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_submit_failed); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_total); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_cache); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_remote); + + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, query_ctx_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_ctx_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_queued); + 
INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_running); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_submit_failed); } void DorisMetrics::initialize(bool init_system_metrics, const std::set& disk_devices, diff --git a/be/src/util/doris_metrics.h b/be/src/util/doris_metrics.h index 31b907eec9ed6c..d089758c21c93f 100644 --- a/be/src/util/doris_metrics.h +++ b/be/src/util/doris_metrics.h @@ -236,17 +236,17 @@ class DorisMetrics { UIntGauge* group_local_scan_thread_pool_queue_size = nullptr; UIntGauge* group_local_scan_thread_pool_thread_num = nullptr; - IntAtomicCounter* num_io_bytes_read_total = nullptr; - IntAtomicCounter* num_io_bytes_read_from_cache = nullptr; - IntAtomicCounter* num_io_bytes_read_from_remote = nullptr; - - IntAtomicCounter* query_ctx_cnt = nullptr; - IntAtomicCounter* scanner_ctx_cnt = nullptr; - IntAtomicCounter* scanner_cnt = nullptr; - IntAtomicCounter* scanner_task_cnt = nullptr; - IntAtomicCounter* scanner_task_queued = nullptr; - IntAtomicCounter* scanner_task_submit_failed = nullptr; - IntAtomicCounter* scanner_task_running = nullptr; + IntCounter* num_io_bytes_read_total = nullptr; + IntCounter* num_io_bytes_read_from_cache = nullptr; + IntCounter* num_io_bytes_read_from_remote = nullptr; + + IntCounter* query_ctx_cnt = nullptr; + IntCounter* scanner_ctx_cnt = nullptr; + IntCounter* scanner_cnt = nullptr; + IntCounter* scanner_task_cnt = nullptr; + IntCounter* scanner_task_queued = nullptr; + IntCounter* scanner_task_submit_failed = nullptr; + IntCounter* scanner_task_running = nullptr; static DorisMetrics* instance() { static DorisMetrics instance; diff --git a/be/src/util/metrics.h b/be/src/util/metrics.h index ac7e69a4ef8ab4..cb49884fefb60b 100644 --- a/be/src/util/metrics.h +++ b/be/src/util/metrics.h @@ -19,21 +19,17 @@ #include #include -#include -#include #include #include #include #include #include -#include #include #include #include #include -#include "util/core_local.h" #include 
"util/histogram.h" namespace doris { @@ -67,8 +63,8 @@ using Labels = std::unordered_map; class Metric { public: - Metric() {} - virtual ~Metric() {} + Metric() = default; + virtual ~Metric() = default; virtual std::string to_string() const = 0; virtual std::string to_prometheus(const std::string& display_name, const Labels& entity_labels, const Labels& metric_labels) const; @@ -83,7 +79,7 @@ template class AtomicMetric : public Metric { public: AtomicMetric() : _value(T()) {} - virtual ~AtomicMetric() {} + virtual ~AtomicMetric() = default; std::string to_string() const override { return std::to_string(value()); } @@ -101,81 +97,10 @@ class AtomicMetric : public Metric { std::atomic _value; }; -template -class LockSimpleMetric : public Metric { -public: - LockSimpleMetric() : _value(T()) {} - virtual ~LockSimpleMetric() {} - - std::string to_string() const override { return std::to_string(value()); } - - T value() const { - std::lock_guard l(_lock); - return _value; - } - - void increment(const T& delta) { - std::lock_guard l(this->_lock); - _value += delta; - } - - void set_value(const T& value) { - std::lock_guard l(this->_lock); - _value = value; - } - - rj::Value to_json_value(rj::Document::AllocatorType& allocator) const override { - return rj::Value(value()); - } - -protected: - // We use std::mutex instead of std::atomic is because atomic don't support - // double's fetch_add - // TODO(zc): If this is atomic is bottleneck, we change to thread local. 
- // performance: on Intel(R) Xeon(R) CPU E5-2450 int64_t - // original type: 2ns/op - // single thread std::mutex: 26ns/op - // multiple thread(8) std::mutex: 2500ns/op - mutable std::mutex _lock; - T _value; -}; - -template -class CoreLocalCounter : public Metric { -public: - CoreLocalCounter() {} - virtual ~CoreLocalCounter() {} - - std::string to_string() const override { - std::stringstream ss; - ss << value(); - return ss.str(); - } - - T value() const { - T sum = 0; - for (int i = 0; i < _value.size(); ++i) { - sum += *_value.access_at_core(i); - } - return sum; - } - - void increment(const T& delta) { __sync_fetch_and_add(_value.access(), delta); } - - void reset() { _value.reset(); } - - rj::Value to_json_value(rj::Document::AllocatorType& allocator) const override { - return rj::Value(value()); - } - -protected: - CoreLocalValue _value; -}; - class HistogramMetric : public Metric { public: - HistogramMetric() {} - virtual ~HistogramMetric() {} + HistogramMetric() = default; + virtual ~HistogramMetric() = default; HistogramMetric(const HistogramMetric&) = delete; HistogramMetric& operator=(const HistogramMetric&) = delete; @@ -208,41 +133,25 @@ class HistogramMetric : public Metric { template class AtomicCounter : public AtomicMetric { public: - AtomicCounter() {} - virtual ~AtomicCounter() {} + AtomicCounter() = default; + virtual ~AtomicCounter() = default; }; template class AtomicGauge : public AtomicMetric { public: AtomicGauge() : AtomicMetric() {} - virtual ~AtomicGauge() {} -}; - -template -class LockCounter : public LockSimpleMetric { -public: - LockCounter() : LockSimpleMetric() {} - virtual ~LockCounter() {} -}; - -// This can only used for trival type -template -class LockGauge : public LockSimpleMetric { -public: - LockGauge() : LockSimpleMetric() {} - virtual ~LockGauge() {} + virtual ~AtomicGauge() = default; }; -using IntCounter = CoreLocalCounter; -using IntAtomicCounter = AtomicCounter; -using UIntCounter = CoreLocalCounter; -using 
DoubleCounter = LockCounter; +using IntCounter = AtomicCounter; +using UIntCounter = AtomicCounter; +using DoubleCounter = AtomicCounter; using IntGauge = AtomicGauge; using UIntGauge = AtomicGauge; -using DoubleGauge = LockGauge; - +using DoubleGauge = AtomicGauge; using Labels = std::unordered_map; + struct MetricPrototype { public: MetricPrototype(MetricType type_, MetricUnit unit_, std::string name_, @@ -302,15 +211,12 @@ struct MetricPrototype { #define INT_GAUGE_METRIC_REGISTER(entity, metric) \ metric = (IntGauge*)(entity->register_metric(&METRIC_##metric)) -#define INT_DOUBLE_METRIC_REGISTER(entity, metric) \ +#define DOUBLE_GAUGE_METRIC_REGISTER(entity, metric) \ metric = (DoubleGauge*)(entity->register_metric(&METRIC_##metric)) #define INT_UGAUGE_METRIC_REGISTER(entity, metric) \ metric = (UIntGauge*)(entity->register_metric(&METRIC_##metric)) -#define INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, metric) \ - metric = (IntAtomicCounter*)(entity->register_metric(&METRIC_##metric)) - #define HISTOGRAM_METRIC_REGISTER(entity, metric) \ metric = (HistogramMetric*)(entity->register_metric(&METRIC_##metric)) @@ -338,8 +244,8 @@ enum class MetricEntityType { kServer, kTablet }; class MetricEntity { public: - MetricEntity(MetricEntityType type, const std::string& name, const Labels& labels) - : _type(type), _name(name), _labels(labels) {} + MetricEntity(MetricEntityType type, std::string name, Labels labels) + : _type(type), _name(std::move(name)), _labels(std::move(labels)) {} ~MetricEntity() { for (auto& metric : _metrics) { delete metric.second; @@ -401,7 +307,7 @@ using EntityMetricsByType = class MetricRegistry { public: - MetricRegistry(const std::string& name) : _name(name) {} + MetricRegistry(std::string name) : _name(std::move(name)) {} ~MetricRegistry(); std::shared_ptr register_entity( diff --git a/be/src/util/system_metrics.cpp b/be/src/util/system_metrics.cpp index fc2cdcc9262b31..ecbb4d580360c4 100644 --- a/be/src/util/system_metrics.cpp +++ 
b/be/src/util/system_metrics.cpp @@ -44,12 +44,12 @@ DEFINE_COUNTER_METRIC_PROTOTYPE_2ARG(avail_cpu_num, MetricUnit::NOUNIT); DEFINE_COUNTER_METRIC_PROTOTYPE_2ARG(host_cpu_num, MetricUnit::NOUNIT); struct CpuNumberMetrics { CpuNumberMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, host_cpu_num); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, avail_cpu_num); + INT_COUNTER_METRIC_REGISTER(entity, host_cpu_num); + INT_COUNTER_METRIC_REGISTER(entity, avail_cpu_num); } - IntAtomicCounter* host_cpu_num {nullptr}; - IntAtomicCounter* avail_cpu_num {nullptr}; + IntCounter* host_cpu_num {nullptr}; + IntCounter* avail_cpu_num {nullptr}; MetricEntity* entity = nullptr; }; @@ -70,16 +70,16 @@ DEFINE_CPU_COUNTER_METRIC(guest_nice); // /proc/stat: http://www.linuxhowtos.org/System/procstat.htm struct CpuMetrics { CpuMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_user); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_nice); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_system); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_idle); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_iowait); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_irq); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_soft_irq); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_steal); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_guest); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_guest_nice); + INT_COUNTER_METRIC_REGISTER(entity, cpu_user); + INT_COUNTER_METRIC_REGISTER(entity, cpu_nice); + INT_COUNTER_METRIC_REGISTER(entity, cpu_system); + INT_COUNTER_METRIC_REGISTER(entity, cpu_idle); + INT_COUNTER_METRIC_REGISTER(entity, cpu_iowait); + INT_COUNTER_METRIC_REGISTER(entity, cpu_irq); + INT_COUNTER_METRIC_REGISTER(entity, cpu_soft_irq); + INT_COUNTER_METRIC_REGISTER(entity, cpu_steal); + INT_COUNTER_METRIC_REGISTER(entity, cpu_guest); + INT_COUNTER_METRIC_REGISTER(entity, cpu_guest_nice); metrics[0] = cpu_user; 
metrics[1] = cpu_nice; @@ -96,18 +96,18 @@ struct CpuMetrics { static constexpr int cpu_num_metrics = 10; MetricEntity* entity = nullptr; - IntAtomicCounter* cpu_user; - IntAtomicCounter* cpu_nice; - IntAtomicCounter* cpu_system; - IntAtomicCounter* cpu_idle; - IntAtomicCounter* cpu_iowait; - IntAtomicCounter* cpu_irq; - IntAtomicCounter* cpu_soft_irq; - IntAtomicCounter* cpu_steal; - IntAtomicCounter* cpu_guest; - IntAtomicCounter* cpu_guest_nice; - - IntAtomicCounter* metrics[cpu_num_metrics]; + IntCounter* cpu_user; + IntCounter* cpu_nice; + IntCounter* cpu_system; + IntCounter* cpu_idle; + IntCounter* cpu_iowait; + IntCounter* cpu_irq; + IntCounter* cpu_soft_irq; + IntCounter* cpu_steal; + IntCounter* cpu_guest; + IntCounter* cpu_guest_nice; + + IntCounter* metrics[cpu_num_metrics]; }; #define DEFINE_MEMORY_GAUGE_METRIC(metric, unit) \ @@ -216,25 +216,25 @@ DEFINE_DISK_COUNTER_METRIC(io_time_weigthed, MetricUnit::MILLISECONDS); struct DiskMetrics { DiskMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_reads_completed); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_bytes_read); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_read_time_ms); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_writes_completed); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_bytes_written); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_write_time_ms); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_io_time_ms); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_io_time_weigthed); + INT_COUNTER_METRIC_REGISTER(entity, disk_reads_completed); + INT_COUNTER_METRIC_REGISTER(entity, disk_bytes_read); + INT_COUNTER_METRIC_REGISTER(entity, disk_read_time_ms); + INT_COUNTER_METRIC_REGISTER(entity, disk_writes_completed); + INT_COUNTER_METRIC_REGISTER(entity, disk_bytes_written); + INT_COUNTER_METRIC_REGISTER(entity, disk_write_time_ms); + INT_COUNTER_METRIC_REGISTER(entity, disk_io_time_ms); + INT_COUNTER_METRIC_REGISTER(entity, 
disk_io_time_weigthed); } MetricEntity* entity = nullptr; - IntAtomicCounter* disk_reads_completed; - IntAtomicCounter* disk_bytes_read; - IntAtomicCounter* disk_read_time_ms; - IntAtomicCounter* disk_writes_completed; - IntAtomicCounter* disk_bytes_written; - IntAtomicCounter* disk_write_time_ms; - IntAtomicCounter* disk_io_time_ms; - IntAtomicCounter* disk_io_time_weigthed; + IntCounter* disk_reads_completed; + IntCounter* disk_bytes_read; + IntCounter* disk_read_time_ms; + IntCounter* disk_writes_completed; + IntCounter* disk_bytes_written; + IntCounter* disk_write_time_ms; + IntCounter* disk_io_time_ms; + IntCounter* disk_io_time_weigthed; }; #define DEFINE_NETWORK_COUNTER_METRIC(metric, unit) \ @@ -246,17 +246,17 @@ DEFINE_NETWORK_COUNTER_METRIC(send_packets, MetricUnit::PACKETS); struct NetworkMetrics { NetworkMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_receive_bytes); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_receive_packets); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_send_bytes); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_send_packets); + INT_COUNTER_METRIC_REGISTER(entity, network_receive_bytes); + INT_COUNTER_METRIC_REGISTER(entity, network_receive_packets); + INT_COUNTER_METRIC_REGISTER(entity, network_send_bytes); + INT_COUNTER_METRIC_REGISTER(entity, network_send_packets); } MetricEntity* entity = nullptr; - IntAtomicCounter* network_receive_bytes; - IntAtomicCounter* network_receive_packets; - IntAtomicCounter* network_send_bytes; - IntAtomicCounter* network_send_packets; + IntCounter* network_receive_bytes; + IntCounter* network_receive_packets; + IntCounter* network_send_bytes; + IntCounter* network_send_packets; }; #define DEFINE_SNMP_COUNTER_METRIC(metric, unit, desc) \ @@ -270,17 +270,17 @@ DEFINE_SNMP_COUNTER_METRIC(tcp_out_segs, MetricUnit::NOUNIT, "All send TCP packe // metrics read from /proc/net/snmp struct SnmpMetrics { SnmpMetrics(MetricEntity* ent) : 
entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_errs); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_retrans_segs); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_segs); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_out_segs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_errs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_retrans_segs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_segs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_out_segs); } MetricEntity* entity = nullptr; - IntAtomicCounter* snmp_tcp_in_errs; - IntAtomicCounter* snmp_tcp_retrans_segs; - IntAtomicCounter* snmp_tcp_in_segs; - IntAtomicCounter* snmp_tcp_out_segs; + IntCounter* snmp_tcp_in_errs; + IntCounter* snmp_tcp_retrans_segs; + IntCounter* snmp_tcp_in_segs; + IntCounter* snmp_tcp_out_segs; }; #define DEFINE_FD_COUNTER_METRIC(metric, unit) \ @@ -308,9 +308,9 @@ DEFINE_LOAD_AVERAGE_DOUBLE_METRIC(15_minutes); struct LoadAverageMetrics { LoadAverageMetrics(MetricEntity* ent) : entity(ent) { - INT_DOUBLE_METRIC_REGISTER(entity, load_average_1_minutes); - INT_DOUBLE_METRIC_REGISTER(entity, load_average_5_minutes); - INT_DOUBLE_METRIC_REGISTER(entity, load_average_15_minutes); + DOUBLE_GAUGE_METRIC_REGISTER(entity, load_average_1_minutes); + DOUBLE_GAUGE_METRIC_REGISTER(entity, load_average_5_minutes); + DOUBLE_GAUGE_METRIC_REGISTER(entity, load_average_15_minutes); } MetricEntity* entity = nullptr; @@ -329,18 +329,18 @@ DEFINE_PROC_STAT_COUNTER_METRIC(procs_blocked); struct ProcMetrics { ProcMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_interrupt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_ctxt_switch); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_procs_running); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_procs_blocked); + INT_COUNTER_METRIC_REGISTER(entity, proc_interrupt); + INT_COUNTER_METRIC_REGISTER(entity, proc_ctxt_switch); + INT_COUNTER_METRIC_REGISTER(entity, 
proc_procs_running); + INT_COUNTER_METRIC_REGISTER(entity, proc_procs_blocked); } MetricEntity* entity = nullptr; - IntAtomicCounter* proc_interrupt; - IntAtomicCounter* proc_ctxt_switch; - IntAtomicCounter* proc_procs_running; - IntAtomicCounter* proc_procs_blocked; + IntCounter* proc_interrupt; + IntCounter* proc_ctxt_switch; + IntCounter* proc_procs_running; + IntCounter* proc_procs_blocked; }; DEFINE_GAUGE_CORE_METRIC_PROTOTYPE_2ARG(max_disk_io_util_percent, MetricUnit::PERCENT); diff --git a/be/src/vec/aggregate_functions/aggregate_function_window.h b/be/src/vec/aggregate_functions/aggregate_function_window.h index 0cef4c82d3dbfe..5d449318b7d2f5 100644 --- a/be/src/vec/aggregate_functions/aggregate_function_window.h +++ b/be/src/vec/aggregate_functions/aggregate_function_window.h @@ -402,7 +402,7 @@ struct LeadLagData { if (nullable_column->is_null_at(0)) { _default_value.reset(); } else { - _default_value.set_value(nullable_column->get_nested_column_ptr(), 0); + _default_value.set_value(nullable_column->get_nested_column_ptr().get(), 0); } } else { _default_value.set_value(column, 0); diff --git a/be/src/vec/columns/column_object.cpp b/be/src/vec/columns/column_object.cpp index 3d6a3e44436d29..4300725cacaf9c 100644 --- a/be/src/vec/columns/column_object.cpp +++ b/be/src/vec/columns/column_object.cpp @@ -1484,7 +1484,7 @@ Status ColumnObject::serialize_one_row_to_json_format(size_t row, rapidjson::Str #endif for (const auto& subcolumn : subcolumns) { RETURN_IF_ERROR(find_and_set_leave_value( - subcolumn->data.get_finalized_column_ptr(), subcolumn->path, + subcolumn->data.get_finalized_column_ptr().get(), subcolumn->path, subcolumn->data.get_least_common_type_serde(), subcolumn->data.get_least_common_type(), subcolumn->data.least_common_type.get_base_type_id(), root, @@ -1558,7 +1558,7 @@ Status ColumnObject::merge_sparse_to_root_column() { continue; } bool succ = find_and_set_leave_value( - column, subcolumn->path, 
subcolumn->data.get_least_common_type_serde(), + column.get(), subcolumn->path, subcolumn->data.get_least_common_type_serde(), subcolumn->data.get_least_common_type(), subcolumn->data.least_common_type.get_base_type_id(), root, doc_structure->GetAllocator(), mem_pool, i); @@ -1705,7 +1705,7 @@ bool ColumnObject::empty() const { } ColumnPtr get_base_column_of_array(const ColumnPtr& column) { - if (const auto* column_array = check_and_get_column(column)) { + if (const auto* column_array = check_and_get_column(column.get())) { return column_array->get_data_ptr(); } return column; diff --git a/be/src/vec/common/cow.h b/be/src/vec/common/cow.h index 95df7694f227d9..4970f649d32e85 100644 --- a/be/src/vec/common/cow.h +++ b/be/src/vec/common/cow.h @@ -203,8 +203,6 @@ class COW { operator bool() const { return t != nullptr; } - operator T*() const { return t; } - private: T* t = nullptr; }; @@ -346,8 +344,8 @@ class COW { operator const immutable_ptr&() const { return value; } operator immutable_ptr&() { return value; } - operator bool() const { return value != nullptr; } - bool operator!() const { return value == nullptr; } + operator bool() const { return value.get() != nullptr; } + bool operator!() const { return value.get() == nullptr; } bool operator==(const chameleon_ptr& rhs) const { return value == rhs.value; } bool operator!=(const chameleon_ptr& rhs) const { return value != rhs.value; } diff --git a/be/src/vec/common/typeid_cast.h b/be/src/vec/common/typeid_cast.h index e135ef3309d2ec..3f81586a707c33 100644 --- a/be/src/vec/common/typeid_cast.h +++ b/be/src/vec/common/typeid_cast.h @@ -20,14 +20,11 @@ #pragma once -#include #include -#include #include #include "common/exception.h" #include "common/status.h" -#include "vec/common/demangle.h" /** Checks type by comparing typeid. * The exact match of the type is checked. That is, cast to the ancestor will be unsuccessful. 
diff --git a/be/src/vec/exec/format/column_type_convert.cpp b/be/src/vec/exec/format/column_type_convert.cpp index a2c226c91d6799..0442158b690c39 100644 --- a/be/src/vec/exec/format/column_type_convert.cpp +++ b/be/src/vec/exec/format/column_type_convert.cpp @@ -99,7 +99,7 @@ ColumnPtr ColumnTypeConverter::get_column(const TypeDescriptor& src_type, Column return dst_column; } - if (_cached_src_column == nullptr) { + if (!_cached_src_column) { _cached_src_type = DataTypeFactory::instance().create_data_type(src_type, dst_type->is_nullable()); _cached_src_column = diff --git a/be/src/vec/exec/format/csv/csv_reader.cpp b/be/src/vec/exec/format/csv/csv_reader.cpp index b27bb050dc6e0c..d4a2dcfc7f3503 100644 --- a/be/src/vec/exec/format/csv/csv_reader.cpp +++ b/be/src/vec/exec/format/csv/csv_reader.cpp @@ -657,7 +657,7 @@ Status CsvReader::_fill_dest_columns(const Slice& line, Block* block, col_idx < _split_values.size() ? _split_values[col_idx] : _s_null_slice; Slice slice {value.data, value.size}; - IColumn* col_ptr = columns[i]; + IColumn* col_ptr = columns[i].get(); if (!_is_load) { col_ptr = const_cast( block->get_by_position(_file_slot_idx_map[i]).column.get()); @@ -700,7 +700,7 @@ Status CsvReader::_fill_dest_columns(const Slice& line, Block* block, Status CsvReader::_fill_empty_line(Block* block, std::vector& columns, size_t* rows) { for (int i = 0; i < _file_slot_descs.size(); ++i) { - IColumn* col_ptr = columns[i]; + IColumn* col_ptr = columns[i].get(); if (!_is_load) { col_ptr = const_cast( block->get_by_position(_file_slot_idx_map[i]).column.get()); diff --git a/be/src/vec/exec/format/json/new_json_reader.cpp b/be/src/vec/exec/format/json/new_json_reader.cpp index d79e86520741cd..adb22d588f53d3 100644 --- a/be/src/vec/exec/format/json/new_json_reader.cpp +++ b/be/src/vec/exec/format/json/new_json_reader.cpp @@ -886,7 +886,7 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator if (column_ptr->is_nullable()) { nullable_column = 
reinterpret_cast(column_ptr); - data_column_ptr = nullable_column->get_nested_column().get_ptr(); + data_column_ptr = nullable_column->get_nested_column().get_ptr().get(); data_serde = serde->get_nested_serdes()[0]; if (value_is_null) { @@ -1010,7 +1010,8 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator const auto& sub_col_type = type_desc.children[sub_col_idx]; RETURN_IF_ERROR(_write_data_to_column( - sub_value, sub_col_type, struct_column_ptr->get_column(sub_col_idx).get_ptr(), + sub_value, sub_col_type, + struct_column_ptr->get_column(sub_col_idx).get_ptr().get(), column_name + "." + type_desc.field_names[sub_col_idx], sub_serdes[sub_col_idx], valid)); } @@ -1026,12 +1027,12 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator for (const auto& member_value : object_value) { RETURN_IF_ERROR(_write_data_to_column( &member_value.name, type_desc.children[0], - map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr().get(), column_name + ".key", sub_serdes[0], valid)); RETURN_IF_ERROR(_write_data_to_column( &member_value.value, type_desc.children[1], - map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr().get(), column_name + ".value", sub_serdes[1], valid)); } @@ -1048,7 +1049,7 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator for (const auto& sub_value : array_value) { RETURN_IF_ERROR(_write_data_to_column(&sub_value, type_desc.children[0], - array_column_ptr->get_data().get_ptr(), + array_column_ptr->get_data().get_ptr().get(), column_name + ".element", sub_serdes[0], valid)); } auto& offsets = array_column_ptr->get_offsets(); @@ -1653,7 +1654,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& if (column_ptr->is_nullable()) { nullable_column = reinterpret_cast(column_ptr); - data_column_ptr = 
nullable_column->get_nested_column().get_ptr(); + data_column_ptr = nullable_column->get_nested_column().get_ptr().get(); data_serde = serde->get_nested_serdes()[0]; // kNullType will put 1 into the Null map, so there is no need to push 0 for kNullType. @@ -1727,7 +1728,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& const auto& sub_col_type = type_desc.children[sub_column_idx]; RETURN_IF_ERROR(_simdjson_write_data_to_column( - sub.value(), sub_col_type, sub_column_ptr, column_name + "." + sub_key, + sub.value(), sub_col_type, sub_column_ptr.get(), column_name + "." + sub_key, sub_serdes[sub_column_idx], valid)); } @@ -1768,7 +1769,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& auto nullable_column = static_cast(column_ptr); nullable_column->get_null_map_data().push_back(0); - data_column_ptr = nullable_column->get_nested_column().get_ptr(); + data_column_ptr = nullable_column->get_nested_column().get_ptr().get(); data_serde = serde->get_nested_serdes()[0]; } Slice slice(key_view.data(), key_view.length()); @@ -1779,13 +1780,13 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& }; RETURN_IF_ERROR(f(member_value.unescaped_key(), type_desc.children[0], - map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr().get(), sub_serdes[0], _serde_options, valid)); simdjson::ondemand::value field_value = member_value.value(); RETURN_IF_ERROR(_simdjson_write_data_to_column( field_value, type_desc.children[1], - map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr().get(), column_name + ".value", sub_serdes[1], valid)); field_count++; } @@ -1807,7 +1808,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& int field_count = 0; for (simdjson::ondemand::value sub_value : array_value) { 
RETURN_IF_ERROR(_simdjson_write_data_to_column( - sub_value, type_desc.children[0], array_column_ptr->get_data().get_ptr(), + sub_value, type_desc.children[0], array_column_ptr->get_data().get_ptr().get(), column_name + ".element", sub_serdes[0], valid)); field_count++; } diff --git a/be/src/vec/exec/format/orc/vorc_reader.cpp b/be/src/vec/exec/format/orc/vorc_reader.cpp index a1ecb1ae0dcf8b..4d41830668960c 100644 --- a/be/src/vec/exec/format/orc/vorc_reader.cpp +++ b/be/src/vec/exec/format/orc/vorc_reader.cpp @@ -143,7 +143,7 @@ void ORCFileInputStream::read(void* buf, uint64_t length, uint64_t offset) { OrcReader::OrcReader(RuntimeProfile* profile, RuntimeState* state, const TFileScanRangeParams& params, const TFileRangeDesc& range, size_t batch_size, const std::string& ctz, io::IOContext* io_ctx, - bool enable_lazy_mat, std::vector* unsupported_pushdown_types) + bool enable_lazy_mat) : _profile(profile), _state(state), _scan_params(params), @@ -156,8 +156,7 @@ OrcReader::OrcReader(RuntimeProfile* profile, RuntimeState* state, _enable_lazy_mat(enable_lazy_mat), _enable_filter_by_min_max( state == nullptr ? 
true : state->query_options().enable_orc_filter_by_min_max), - _dict_cols_has_converted(false), - _unsupported_pushdown_types(unsupported_pushdown_types) { + _dict_cols_has_converted(false) { TimezoneUtils::find_cctz_time_zone(ctz, _time_zone); VecDateTimeValue t; t.from_unixtime(0, ctz); @@ -460,7 +459,8 @@ static std::unordered_map TYPEKIND_TO_PRE {orc::TypeKind::DOUBLE, orc::PredicateDataType::FLOAT}, {orc::TypeKind::STRING, orc::PredicateDataType::STRING}, {orc::TypeKind::BINARY, orc::PredicateDataType::STRING}, - {orc::TypeKind::CHAR, orc::PredicateDataType::STRING}, + // should not pust down CHAR type, because CHAR type is fixed length and will be padded + // {orc::TypeKind::CHAR, orc::PredicateDataType::STRING}, {orc::TypeKind::VARCHAR, orc::PredicateDataType::STRING}, {orc::TypeKind::DATE, orc::PredicateDataType::DATE}, {orc::TypeKind::DECIMAL, orc::PredicateDataType::DECIMAL}, @@ -492,8 +492,9 @@ std::tuple convert_to_orc_literal(const orc::Type* type, [[fallthrough]]; case orc::TypeKind::BINARY: [[fallthrough]]; - case orc::TypeKind::CHAR: - [[fallthrough]]; + // should not pust down CHAR type, because CHAR type is fixed length and will be padded + // case orc::TypeKind::CHAR: + // [[fallthrough]]; case orc::TypeKind::VARCHAR: { return std::make_tuple(true, orc::Literal(literal_data.data, literal_data.size)); } @@ -593,7 +594,15 @@ std::tuple OrcReader::_make_orc_lite auto literal_data = literal->get_column_ptr()->get_data_at(0); auto* slot = _tuple_descriptor->slots()[slot_ref->column_id()]; auto slot_type = slot->type(); - switch (slot_type.type) { + auto primitive_type = slot_type.type; + auto src_type = OrcReader::convert_to_doris_type(orc_type).type; + // should not down predicate for string type change from other type + if (src_type != primitive_type && !is_string_type(src_type) && is_string_type(primitive_type)) { + LOG(WARNING) << "Unsupported Push Down Schema Changed Column " << primitive_type << " to " + << src_type; + return 
std::make_tuple(false, orc::Literal(false), orc::PredicateDataType::LONG); + } + switch (primitive_type) { #define M(NAME) \ case TYPE_##NAME: { \ auto [valid, orc_literal] = convert_to_orc_literal( \ @@ -606,7 +615,6 @@ std::tuple OrcReader::_make_orc_lite M(INT) \ M(BIGINT) \ M(LARGEINT) \ - M(CHAR) \ M(DATE) \ M(DATETIME) \ M(DATEV2) \ diff --git a/be/src/vec/exec/format/orc/vorc_reader.h b/be/src/vec/exec/format/orc/vorc_reader.h index 0dd19077bcf0af..6bbf3bead1efce 100644 --- a/be/src/vec/exec/format/orc/vorc_reader.h +++ b/be/src/vec/exec/format/orc/vorc_reader.h @@ -129,8 +129,7 @@ class OrcReader : public GenericReader { OrcReader(RuntimeProfile* profile, RuntimeState* state, const TFileScanRangeParams& params, const TFileRangeDesc& range, size_t batch_size, const std::string& ctz, - io::IOContext* io_ctx, bool enable_lazy_mat = true, - std::vector* unsupported_pushdown_types = nullptr); + io::IOContext* io_ctx, bool enable_lazy_mat = true); OrcReader(const TFileScanRangeParams& params, const TFileRangeDesc& range, const std::string& ctz, io::IOContext* io_ctx, bool enable_lazy_mat = true); @@ -639,7 +638,6 @@ class OrcReader : public GenericReader { std::unique_ptr _string_dict_filter; bool _dict_cols_has_converted = false; bool _has_complex_type = false; - std::vector* _unsupported_pushdown_types; // resolve schema change std::unordered_map> _converters; diff --git a/be/src/vec/exec/format/parquet/parquet_column_convert.cpp b/be/src/vec/exec/format/parquet/parquet_column_convert.cpp index 0a5ef2913dd940..49636d809aa0d8 100644 --- a/be/src/vec/exec/format/parquet/parquet_column_convert.cpp +++ b/be/src/vec/exec/format/parquet/parquet_column_convert.cpp @@ -79,7 +79,7 @@ ColumnPtr PhysicalToLogicalConverter::get_physical_column(tparquet::Type::type s return dst_logical_column; } - if (_cached_src_physical_column == nullptr) { + if (!_cached_src_physical_column) { switch (src_physical_type) { case tparquet::Type::type::BOOLEAN: _cached_src_physical_type = 
std::make_shared(); diff --git a/be/src/vec/exec/format/wal/wal_reader.cpp b/be/src/vec/exec/format/wal/wal_reader.cpp index 22e6928216e1e8..a9a209b95a4ce1 100644 --- a/be/src/vec/exec/format/wal/wal_reader.cpp +++ b/be/src/vec/exec/format/wal/wal_reader.cpp @@ -92,7 +92,7 @@ Status WalReader::get_next_block(Block* block, size_t* read_rows, bool* eof) { pos, src_block.columns()); } vectorized::ColumnPtr column_ptr = src_block.get_by_position(pos).column; - if (column_ptr != nullptr && slot_desc->is_nullable()) { + if (!column_ptr && slot_desc->is_nullable()) { column_ptr = make_nullable(column_ptr); } dst_block.insert(index, vectorized::ColumnWithTypeAndName( diff --git a/be/src/vec/exec/jni_connector.cpp b/be/src/vec/exec/jni_connector.cpp index 11a58e81c98d89..4b5bb72e57bfbd 100644 --- a/be/src/vec/exec/jni_connector.cpp +++ b/be/src/vec/exec/jni_connector.cpp @@ -241,7 +241,7 @@ Status JniConnector::fill_block(Block* block, const ColumnNumbers& arguments, lo TableMetaAddress table_meta(table_address); long num_rows = table_meta.next_meta_as_long(); for (size_t i : arguments) { - if (block->get_by_position(i).column == nullptr) { + if (block->get_by_position(i).column.get() == nullptr) { auto return_type = block->get_data_type(i); bool result_nullable = return_type->is_nullable(); ColumnUInt8::MutablePtr null_col = nullptr; diff --git a/be/src/vec/exec/scan/vfile_scanner.cpp b/be/src/vec/exec/scan/vfile_scanner.cpp index 76639e4bed4a28..15b681f597975e 100644 --- a/be/src/vec/exec/scan/vfile_scanner.cpp +++ b/be/src/vec/exec/scan/vfile_scanner.cpp @@ -596,7 +596,7 @@ Status VFileScanner::_convert_to_output_block(Block* block) { column_ptr = _src_block_ptr->get_by_position(result_column_id).column; // column_ptr maybe a ColumnConst, convert it to a normal column column_ptr = column_ptr->convert_to_full_column_if_const(); - DCHECK(column_ptr != nullptr); + DCHECK(column_ptr); // because of src_slot_desc is always be nullable, so the column_ptr after do dest_expr // 
is likely to be nullable @@ -879,17 +879,9 @@ Status VFileScanner::_get_next_reader() { break; } case TFileFormatType::FORMAT_ORC: { - std::vector* unsupported_pushdown_types = nullptr; - if (range.__isset.table_format_params && - range.table_format_params.table_format_type == "paimon") { - static std::vector paimon_unsupport_type = - std::vector {orc::TypeKind::CHAR}; - unsupported_pushdown_types = &paimon_unsupport_type; - } std::unique_ptr orc_reader = OrcReader::create_unique( _profile, _state, *_params, range, _state->query_options().batch_size, - _state->timezone(), _io_ctx.get(), _state->query_options().enable_orc_lazy_mat, - unsupported_pushdown_types); + _state->timezone(), _io_ctx.get(), _state->query_options().enable_orc_lazy_mat); orc_reader->set_push_down_agg_type(_get_push_down_agg_type()); if (push_down_predicates) { RETURN_IF_ERROR(_process_late_arrival_conjuncts()); diff --git a/be/src/vec/exec/scan/vmeta_scanner.cpp b/be/src/vec/exec/scan/vmeta_scanner.cpp index 289930b16bce85..db0256728741c7 100644 --- a/be/src/vec/exec/scan/vmeta_scanner.cpp +++ b/be/src/vec/exec/scan/vmeta_scanner.cpp @@ -148,7 +148,7 @@ Status VMetaScanner::_fill_block_with_remote_data(const std::vectoris_nullable()) { auto& null_col = reinterpret_cast(*col_ptr); null_col.get_null_map_data().push_back(0); - col_ptr = null_col.get_nested_column_ptr(); + col_ptr = null_col.get_nested_column_ptr().get(); } switch (slot_desc->type().type) { case TYPE_BOOLEAN: { diff --git a/be/src/vec/exprs/vcompound_pred.h b/be/src/vec/exprs/vcompound_pred.h index ff7649600b4c7f..e3c02f554b3d36 100644 --- a/be/src/vec/exprs/vcompound_pred.h +++ b/be/src/vec/exprs/vcompound_pred.h @@ -272,8 +272,10 @@ class VCompoundPred : public VectorizedFnCall { auto col_res = ColumnUInt8::create(size); auto col_nulls = ColumnUInt8::create(size); - auto* __restrict res_datas = assert_cast(col_res)->get_data().data(); - auto* __restrict res_nulls = assert_cast(col_nulls)->get_data().data(); + auto* __restrict 
res_datas = + assert_cast(col_res.get())->get_data().data(); + auto* __restrict res_nulls = + assert_cast(col_nulls.get())->get_data().data(); ColumnPtr temp_null_map = nullptr; // maybe both children are nullable / or one of children is nullable auto* __restrict lhs_null_map_tmp = create_null_map_column(temp_null_map, lhs_null_map); diff --git a/be/src/vec/functions/array/function_array_cum_sum.cpp b/be/src/vec/functions/array/function_array_cum_sum.cpp index 2f93a2a83b1a89..5fba7d4a619bd5 100644 --- a/be/src/vec/functions/array/function_array_cum_sum.cpp +++ b/be/src/vec/functions/array/function_array_cum_sum.cpp @@ -118,7 +118,7 @@ class FunctionArrayCumSum : public IFunction { // get null map const ColumnNullable* src_nested_nullable_col = check_and_get_column(*src_nested_column); - src_nested_column = src_nested_nullable_col->get_nested_column_ptr(); + src_nested_column = src_nested_nullable_col->get_nested_column_ptr().get(); const NullMapType& src_null_map = src_nested_nullable_col->get_null_map_column().get_data(); ColumnPtr res_nested_ptr; diff --git a/be/src/vec/functions/array/function_array_distinct.h b/be/src/vec/functions/array/function_array_distinct.h index 4b7e3e6f035d48..4d37f7cbcf7133 100644 --- a/be/src/vec/functions/array/function_array_distinct.h +++ b/be/src/vec/functions/array/function_array_distinct.h @@ -102,14 +102,14 @@ class FunctionArrayDistinct : public IFunction { if (src_nested_column->is_nullable()) { const auto* src_nested_nullable_col = check_and_get_column(*src_nested_column); - src_nested_column = src_nested_nullable_col->get_nested_column_ptr(); + src_nested_column = src_nested_nullable_col->get_nested_column_ptr().get(); src_null_map = &src_nested_nullable_col->get_null_map_column().get_data(); } NullMapType* dest_null_map = nullptr; if (dest_nested_column->is_nullable()) { auto* dest_nested_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr(); + 
dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr().get(); dest_null_map = &dest_nested_nullable_col->get_null_map_column().get_data(); } diff --git a/be/src/vec/functions/array/function_array_enumerate.cpp b/be/src/vec/functions/array/function_array_enumerate.cpp index 0e8bca3e5cd3b1..3846addb83bb55 100644 --- a/be/src/vec/functions/array/function_array_enumerate.cpp +++ b/be/src/vec/functions/array/function_array_enumerate.cpp @@ -83,7 +83,7 @@ class FunctionArrayEnumerate : public IFunction { auto left_column = block.get_by_position(arguments[0]).column->convert_to_full_column_if_const(); const ColumnArray* array = - check_and_get_column(remove_nullable(left_column->get_ptr())); + check_and_get_column(remove_nullable(left_column->get_ptr()).get()); if (!array) { return Status::RuntimeError( fmt::format("Illegal column {}, of first argument of function {}", @@ -107,7 +107,8 @@ class FunctionArrayEnumerate : public IFunction { ColumnPtr res_column = ColumnArray::create(std::move(nested_column), array->get_offsets_ptr()); if (block.get_by_position(arguments[0]).column->is_nullable()) { - const ColumnNullable* nullable = check_and_get_column(left_column); + const ColumnNullable* nullable = + check_and_get_column(left_column.get()); res_column = ColumnNullable::create( res_column, nullable->get_null_map_column().clone_resized(nullable->size())); } diff --git a/be/src/vec/functions/array/function_array_enumerate_uniq.cpp b/be/src/vec/functions/array/function_array_enumerate_uniq.cpp index 21d6ab40007b6e..bdee406655f196 100644 --- a/be/src/vec/functions/array/function_array_enumerate_uniq.cpp +++ b/be/src/vec/functions/array/function_array_enumerate_uniq.cpp @@ -128,7 +128,7 @@ class FunctionArrayEnumerateUniq : public IFunction { block.get_by_position(arguments[i]).column->convert_to_full_column_if_const()); ColumnPtr& cur_column = src_columns[i]; const ColumnArray* array = - check_and_get_column(remove_nullable(cur_column->get_ptr())); + 
check_and_get_column(remove_nullable(cur_column->get_ptr()).get()); if (!array) { return Status::RuntimeError( fmt::format("Illegal column {}, of first argument of function {}", @@ -151,7 +151,7 @@ class FunctionArrayEnumerateUniq : public IFunction { const NullMapType* null_map = nullptr; if (arguments.size() == 1 && data_columns[0]->is_nullable()) { const ColumnNullable* nullable = check_and_get_column(*data_columns[0]); - data_columns[0] = nullable->get_nested_column_ptr(); + data_columns[0] = nullable->get_nested_column_ptr().get(); null_map = &nullable->get_null_map_column().get_data(); } @@ -219,7 +219,8 @@ class FunctionArrayEnumerateUniq : public IFunction { if (arguments.size() == 1 && block.get_by_position(arguments[0]).column->is_nullable()) { auto left_column = block.get_by_position(arguments[0]).column->convert_to_full_column_if_const(); - const ColumnNullable* nullable = check_and_get_column(left_column); + const ColumnNullable* nullable = + check_and_get_column(left_column.get()); res_column = ColumnNullable::create( res_column, nullable->get_null_map_column().clone_resized(nullable->size())); } diff --git a/be/src/vec/functions/array/function_array_join.h b/be/src/vec/functions/array/function_array_join.h index 957b2288fb746a..29521c36111824 100644 --- a/be/src/vec/functions/array/function_array_join.h +++ b/be/src/vec/functions/array/function_array_join.h @@ -78,10 +78,11 @@ struct ArrayJoinImpl { auto nested_type = data_type_array->get_nested_type(); auto dest_column_ptr = ColumnString::create(); - DCHECK(dest_column_ptr != nullptr); + DCHECK(dest_column_ptr); - auto res_val = _execute_by_type(*src.nested_col, *src.offsets_ptr, src.nested_nullmap_data, - sep_str, null_replace_str, nested_type, dest_column_ptr); + auto res_val = + _execute_by_type(*src.nested_col, *src.offsets_ptr, src.nested_nullmap_data, + sep_str, null_replace_str, nested_type, dest_column_ptr.get()); if (!res_val) { return Status::RuntimeError(fmt::format( "execute failed or 
unsupported types for function {}({},{},{})", "array_join", diff --git a/be/src/vec/functions/array/function_array_map.h b/be/src/vec/functions/array/function_array_map.h index fd4a2fc59f3548..5bfe723e232884 100644 --- a/be/src/vec/functions/array/function_array_map.h +++ b/be/src/vec/functions/array/function_array_map.h @@ -165,7 +165,7 @@ struct ArrayMapImpl { static Status execute(ColumnPtr& res_ptr, ColumnArrayExecutionDatas datas, std::vector& col_const, size_t start_row, size_t end_row) { ColumnArrayMutableData dst = - create_mutable_data(datas[0].nested_col, datas[0].nested_nullmap_data); + create_mutable_data(datas[0].nested_col.get(), datas[0].nested_nullmap_data); if (_execute_internal(dst, datas, col_const, start_row, end_row)) { res_ptr = assemble_column_array(dst); return Status::OK(); diff --git a/be/src/vec/functions/array/function_array_pop.cpp b/be/src/vec/functions/array/function_array_pop.cpp index 2182699e0205b5..1ddd767cfaf3ce 100644 --- a/be/src/vec/functions/array/function_array_pop.cpp +++ b/be/src/vec/functions/array/function_array_pop.cpp @@ -75,7 +75,7 @@ class FunctionArrayPop : public IFunction { } // prepare dst array column bool is_nullable = src.nested_nullmap_data != nullptr; - ColumnArrayMutableData dst = create_mutable_data(src.nested_col, is_nullable); + ColumnArrayMutableData dst = create_mutable_data(src.nested_col.get(), is_nullable); dst.offsets_ptr->reserve(input_rows_count); // start from index depending on the PopType::start_offset auto offset_column = ColumnInt64::create(array_column->size(), PopType::start_offset); diff --git a/be/src/vec/functions/array/function_array_range.cpp b/be/src/vec/functions/array/function_array_range.cpp index 8a3de3754503ae..ffb5987c744d1f 100644 --- a/be/src/vec/functions/array/function_array_range.cpp +++ b/be/src/vec/functions/array/function_array_range.cpp @@ -16,10 +16,10 @@ // under the License. 
#include -#include #include #include +#include #include #include @@ -41,11 +41,11 @@ #include "vec/data_types/data_type_date_time.h" #include "vec/data_types/data_type_nullable.h" #include "vec/data_types/data_type_number.h" +#include "vec/data_types/data_type_time_v2.h" #include "vec/functions/function.h" #include "vec/functions/function_date_or_datetime_computation.h" #include "vec/functions/simple_function_factory.h" #include "vec/runtime/vdatetime_value.h" -#include "vec/utils/util.hpp" namespace doris { class FunctionContext; @@ -137,7 +137,7 @@ struct RangeImplUtil { IColumn* dest_nested_column = &dest_array_column_ptr->get_data(); ColumnNullable* dest_nested_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr().get(); auto& dest_nested_null_map = dest_nested_nullable_col->get_null_map_column().get_data(); auto args_null_map = ColumnUInt8::create(input_rows_count, 0); @@ -229,10 +229,9 @@ struct RangeImplUtil { dest_nested_null_map.push_back(0); offset++; move++; - idx = doris::vectorized::date_time_add< - UNIT::value, DateV2Value, - DateV2Value, DateTimeV2>(idx, step_row, - is_null); + idx = doris::vectorized::date_time_add(idx, step_row, + is_null); } dest_offsets.push_back(offset); } diff --git a/be/src/vec/functions/array/function_array_remove.h b/be/src/vec/functions/array/function_array_remove.h index 197b032b0f8a4b..661a18170ed9dc 100644 --- a/be/src/vec/functions/array/function_array_remove.h +++ b/be/src/vec/functions/array/function_array_remove.h @@ -107,13 +107,13 @@ class FunctionArrayRemove : public IFunction { auto dst_nested_column = ColumnNullable::create(nested_column.clone_empty(), ColumnUInt8::create()); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column->get_nested_column_ptr(); + dst_column = dst_nested_column->get_nested_column_ptr().get(); dst_null_map = 
&dst_nested_column->get_null_map_data(); dst_null_map->reserve(offsets.back()); } else { auto dst_nested_column = nested_column.clone_empty(); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column; + dst_column = dst_nested_column.get(); } auto& dst_data = reinterpret_cast(*dst_column).get_data(); @@ -179,13 +179,13 @@ class FunctionArrayRemove : public IFunction { auto dst_nested_column = ColumnNullable::create(nested_column.clone_empty(), ColumnUInt8::create()); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column->get_nested_column_ptr(); + dst_column = dst_nested_column->get_nested_column_ptr().get(); dst_null_map = &dst_nested_column->get_null_map_data(); dst_null_map->reserve(offsets.back()); } else { auto dst_nested_column = nested_column.clone_empty(); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column; + dst_column = dst_nested_column.get(); } auto& dst_offs = reinterpret_cast(*dst_column).get_offsets(); diff --git a/be/src/vec/functions/array/function_array_reverse.h b/be/src/vec/functions/array/function_array_reverse.h index 8567bc61158bab..9fc1623151801e 100644 --- a/be/src/vec/functions/array/function_array_reverse.h +++ b/be/src/vec/functions/array/function_array_reverse.h @@ -40,7 +40,7 @@ struct ArrayReverseImpl { } bool is_nullable = src.nested_nullmap_data ? 
true : false; - ColumnArrayMutableData dst = create_mutable_data(src.nested_col, is_nullable); + ColumnArrayMutableData dst = create_mutable_data(src.nested_col.get(), is_nullable); dst.offsets_ptr->reserve(input_rows_count); auto res_val = _execute_internal(*src.nested_col, *src.offsets_ptr, *dst.nested_col, diff --git a/be/src/vec/functions/array/function_array_set.h b/be/src/vec/functions/array/function_array_set.h index 1ecf6d72531c73..975268b1e61553 100644 --- a/be/src/vec/functions/array/function_array_set.h +++ b/be/src/vec/functions/array/function_array_set.h @@ -142,9 +142,9 @@ struct ArraySetImpl { bool right_const) { ColumnArrayMutableData dst; if (left_data.nested_nullmap_data || right_data.nested_nullmap_data) { - dst = create_mutable_data(left_data.nested_col, true); + dst = create_mutable_data(left_data.nested_col.get(), true); } else { - dst = create_mutable_data(left_data.nested_col, false); + dst = create_mutable_data(left_data.nested_col.get(), false); } ColumnPtr res_column; if (left_const) { diff --git a/be/src/vec/functions/array/function_array_slice.h b/be/src/vec/functions/array/function_array_slice.h index 2acd1d3fbe1fd4..76082b266026ea 100644 --- a/be/src/vec/functions/array/function_array_slice.h +++ b/be/src/vec/functions/array/function_array_slice.h @@ -89,7 +89,7 @@ class FunctionArraySlice : public IFunction { } // prepare dst array column bool is_nullable = src.nested_nullmap_data ? 
true : false; - ColumnArrayMutableData dst = create_mutable_data(src.nested_col, is_nullable); + ColumnArrayMutableData dst = create_mutable_data(src.nested_col.get(), is_nullable); dst.offsets_ptr->reserve(input_rows_count); // execute slice_array(dst, src, *offset_column, length_column.get()); diff --git a/be/src/vec/functions/array/function_array_sortby.cpp b/be/src/vec/functions/array/function_array_sortby.cpp index 899bb40fba1423..fe6799aaa2e876 100644 --- a/be/src/vec/functions/array/function_array_sortby.cpp +++ b/be/src/vec/functions/array/function_array_sortby.cpp @@ -95,13 +95,13 @@ class FunctionArraySortBy : public IFunction { src_column_array.get_offsets_column().clone_resized(input_rows_count); MutableColumnPtr result_nullmap = nullptr; const ColumnUInt8::Container* src_null_map_data = nullptr; - if (argument_nullmap[0] != nullptr) { + if (argument_nullmap[0]) { const auto& src_column_nullmap = assert_cast(*argument_nullmap[0]); result_nullmap = src_column_nullmap.clone_resized(input_rows_count); src_null_map_data = &(src_column_nullmap.get_data()); } const ColumnUInt8::Container* key_null_map_data = nullptr; - if (argument_nullmap[1] != nullptr) { + if (argument_nullmap[1]) { const auto& key_column_nullmap = assert_cast(*argument_nullmap[1]); key_null_map_data = &(key_column_nullmap.get_data()); } @@ -149,7 +149,7 @@ class FunctionArraySortBy : public IFunction { } } src_nested_nullable_column.append_data_by_selector(result_data_column, src_selector); - if (result_nullmap != nullptr) { + if (result_nullmap) { block.replace_by_position( result, ColumnNullable::create(ColumnArray::create(std::move(result_data_column), diff --git a/be/src/vec/functions/array/function_arrays_overlap.h b/be/src/vec/functions/array/function_arrays_overlap.h index dd993100885e3a..8ac21bcd710f8d 100644 --- a/be/src/vec/functions/array/function_arrays_overlap.h +++ b/be/src/vec/functions/array/function_arrays_overlap.h @@ -370,11 +370,11 @@ class FunctionArraysOverlap : 
public IFunction { ExecutorImpl impl; if (right_size < left_size) { - impl.insert_array(right_data.nested_col, right_start, right_size); - dst_data[row] = impl.find_any(left_data.nested_col, left_start, left_size); + impl.insert_array(right_data.nested_col.get(), right_start, right_size); + dst_data[row] = impl.find_any(left_data.nested_col.get(), left_start, left_size); } else { - impl.insert_array(left_data.nested_col, left_start, left_size); - dst_data[row] = impl.find_any(right_data.nested_col, right_start, right_size); + impl.insert_array(left_data.nested_col.get(), left_start, left_size); + dst_data[row] = impl.find_any(right_data.nested_col.get(), right_start, right_size); } } return Status::OK(); diff --git a/be/src/vec/functions/comparison_equal_for_null.cpp b/be/src/vec/functions/comparison_equal_for_null.cpp index 919f9ebed65a7c..35719cf573008a 100644 --- a/be/src/vec/functions/comparison_equal_for_null.cpp +++ b/be/src/vec/functions/comparison_equal_for_null.cpp @@ -139,18 +139,20 @@ class FunctionEqForNull : public IFunction { left_column = check_and_get_column( assert_cast( col_left.column.get()) - ->get_data_column_ptr()); + ->get_data_column_ptr() + .get()); } else { - left_column = check_and_get_column(col_left.column); + left_column = check_and_get_column(col_left.column.get()); } if (right_const) { right_column = check_and_get_column( assert_cast( col_right.column.get()) - ->get_data_column_ptr()); + ->get_data_column_ptr() + .get()); } else { - right_column = check_and_get_column(col_right.column); + right_column = check_and_get_column(col_right.column.get()); } bool left_nullable = left_column != nullptr; diff --git a/be/src/vec/functions/function_agg_state.h b/be/src/vec/functions/function_agg_state.h index f4b7aef23af220..84a8d4f6f8b055 100644 --- a/be/src/vec/functions/function_agg_state.h +++ b/be/src/vec/functions/function_agg_state.h @@ -82,7 +82,7 @@ class FunctionAggState : public IFunction { save_columns.push_back(column); } - 
agg_columns.push_back(column); + agg_columns.push_back(column.get()); } _agg_function->streaming_agg_serialize_to_column(agg_columns.data(), col, input_rows_count, &(context->get_arena())); diff --git a/be/src/vec/functions/function_binary_arithmetic.h b/be/src/vec/functions/function_binary_arithmetic.h index 4c0b8e7a0890dc..a2757b38346247 100644 --- a/be/src/vec/functions/function_binary_arithmetic.h +++ b/be/src/vec/functions/function_binary_arithmetic.h @@ -165,7 +165,7 @@ struct BinaryOperationImpl { static ColumnPtr adapt_normal_vector_constant(ColumnPtr column_left, B b) { auto column_left_ptr = - check_and_get_column(column_left); + check_and_get_column(column_left.get()); auto column_result = Base::ColumnVectorResult::create(column_left->size()); DCHECK(column_left_ptr != nullptr); @@ -182,7 +182,7 @@ struct BinaryOperationImpl { static ColumnPtr adapt_normal_constant_vector(A a, ColumnPtr column_right) { auto column_right_ptr = - check_and_get_column(column_right); + check_and_get_column(column_right.get()); auto column_result = Base::ColumnVectorResult::create(column_right->size()); DCHECK(column_right_ptr != nullptr); @@ -199,9 +199,9 @@ struct BinaryOperationImpl { static ColumnPtr adapt_normal_vector_vector(ColumnPtr column_left, ColumnPtr column_right) { auto column_left_ptr = - check_and_get_column(column_left); + check_and_get_column(column_left.get()); auto column_right_ptr = - check_and_get_column(column_right); + check_and_get_column(column_right.get()); auto column_result = Base::ColumnVectorResult::create(column_left->size()); DCHECK(column_left_ptr != nullptr && column_right_ptr != nullptr); @@ -447,7 +447,8 @@ struct DecimalBinaryOperation { auto type_result = assert_cast&, TypeCheckOnRelease::DISABLE>( *res_data_type); - auto column_left_ptr = check_and_get_column(column_left); + auto column_left_ptr = + check_and_get_column(column_left.get()); auto column_result = ColumnDecimal::create( column_left->size(), assert_cast&, 
TypeCheckOnRelease::DISABLE>( @@ -482,7 +483,8 @@ struct DecimalBinaryOperation { auto type_result = assert_cast&, TypeCheckOnRelease::DISABLE>( *res_data_type); - auto column_right_ptr = check_and_get_column(column_right); + auto column_right_ptr = + check_and_get_column(column_right.get()); auto column_result = ColumnDecimal::create( column_right->size(), assert_cast&, TypeCheckOnRelease::DISABLE>( @@ -515,8 +517,10 @@ struct DecimalBinaryOperation { const ResultType& max_result_number, const ResultType& scale_diff_multiplier, DataTypePtr res_data_type) { - auto column_left_ptr = check_and_get_column(column_left); - auto column_right_ptr = check_and_get_column(column_right); + auto column_left_ptr = + check_and_get_column(column_left.get()); + auto column_right_ptr = + check_and_get_column(column_right.get()); const auto& type_result = assert_cast&>(*res_data_type); auto column_result = @@ -847,8 +851,8 @@ struct ConstOrVectorAdapter { static ColumnPtr constant_constant(ColumnPtr column_left, ColumnPtr column_right, const LeftDataType& type_left, const RightDataType& type_right, DataTypePtr res_data_type) { - auto column_left_ptr = check_and_get_column(column_left); - auto column_right_ptr = check_and_get_column(column_right); + const auto* column_left_ptr = check_and_get_column(column_left.get()); + const auto* column_right_ptr = check_and_get_column(column_right.get()); DCHECK(column_left_ptr != nullptr && column_right_ptr != nullptr); ColumnPtr column_result = nullptr; @@ -875,7 +879,7 @@ struct ConstOrVectorAdapter { static ColumnPtr vector_constant(ColumnPtr column_left, ColumnPtr column_right, const LeftDataType& type_left, const RightDataType& type_right, DataTypePtr res_data_type) { - auto column_right_ptr = check_and_get_column(column_right); + const auto* column_right_ptr = check_and_get_column(column_right.get()); DCHECK(column_right_ptr != nullptr); if constexpr (result_is_decimal) { @@ -894,7 +898,7 @@ struct ConstOrVectorAdapter { static ColumnPtr 
constant_vector(ColumnPtr column_left, ColumnPtr column_right, const LeftDataType& type_left, const RightDataType& type_right, DataTypePtr res_data_type) { - auto column_left_ptr = check_and_get_column(column_left); + const auto* column_left_ptr = check_and_get_column(column_left.get()); DCHECK(column_left_ptr != nullptr); if constexpr (result_is_decimal) { diff --git a/be/src/vec/functions/function_bitmap.cpp b/be/src/vec/functions/function_bitmap.cpp index 92a5dba7b7a4d4..96cae50a9baf9a 100644 --- a/be/src/vec/functions/function_bitmap.cpp +++ b/be/src/vec/functions/function_bitmap.cpp @@ -1211,7 +1211,7 @@ class FunctionBitmapToArray : public IFunction { IColumn* dest_nested_column = &dest_array_column_ptr->get_data(); ColumnNullable* dest_nested_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr().get(); auto& dest_nested_null_map = dest_nested_nullable_col->get_null_map_column().get_data(); auto& arg_col = block.get_by_position(arguments[0]).column; diff --git a/be/src/vec/functions/function_bitmap_variadic.cpp b/be/src/vec/functions/function_bitmap_variadic.cpp index 6e1a103fdbd83b..47a159e3c2f391 100644 --- a/be/src/vec/functions/function_bitmap_variadic.cpp +++ b/be/src/vec/functions/function_bitmap_variadic.cpp @@ -247,7 +247,7 @@ class FunctionBitMapVariadic : public IFunction { vec_res.resize(input_rows_count); RETURN_IF_ERROR(Impl::vector_vector(argument_columns.data(), argument_size, - input_rows_count, vec_res, col_res_nulls)); + input_rows_count, vec_res, col_res_nulls.get())); if (!use_default_implementation_for_nulls() && result_info.type->is_nullable()) { block.replace_by_position( result, ColumnNullable::create(std::move(col_res), std::move(col_res_nulls))); diff --git a/be/src/vec/functions/function_case.h b/be/src/vec/functions/function_case.h index af44ea0d9b1ace..81f08f682ef0ef 100644 --- 
a/be/src/vec/functions/function_case.h +++ b/be/src/vec/functions/function_case.h @@ -318,7 +318,7 @@ class FunctionCase : public IFunction { const uint8* __restrict then_idx, CaseWhenColumnHolder& column_holder) const { for (auto& then_ptr : column_holder.then_ptrs) { - then_ptr->reset(then_ptr.value()->convert_to_full_column_if_const()); + then_ptr->reset(then_ptr.value()->convert_to_full_column_if_const().get()); } size_t rows_count = column_holder.rows_count; diff --git a/be/src/vec/functions/function_cast.h b/be/src/vec/functions/function_cast.h index af9e9d19267073..483e837de5dfd8 100644 --- a/be/src/vec/functions/function_cast.h +++ b/be/src/vec/functions/function_cast.h @@ -770,7 +770,7 @@ struct ConvertImplGenericFromJsonb { continue; } ReadBuffer read_buffer((char*)(input_str.data()), input_str.size()); - Status st = data_type_to->from_string(read_buffer, col_to); + Status st = data_type_to->from_string(read_buffer, col_to.get()); // if parsing failed, will return null (*vec_null_map_to)[i] = !st.ok(); if (!st.ok()) { diff --git a/be/src/vec/functions/function_collection_in.h b/be/src/vec/functions/function_collection_in.h index ce58d63f44b655..35299c7ea672be 100644 --- a/be/src/vec/functions/function_collection_in.h +++ b/be/src/vec/functions/function_collection_in.h @@ -117,7 +117,8 @@ class FunctionCollectionIn : public IFunction { DCHECK(const_column_ptr != nullptr); const auto& [col, _] = unpack_if_const(const_column_ptr->column_ptr); if (col->is_nullable()) { - auto* null_col = vectorized::check_and_get_column(col); + const auto* null_col = + vectorized::check_and_get_column(col.get()); if (null_col->has_null()) { state->null_in_set = true; } else { @@ -161,7 +162,7 @@ class FunctionCollectionIn : public IFunction { if (materialized_column_not_null->is_nullable()) { materialized_column_not_null = assert_cast( vectorized::check_and_get_column( - materialized_column_not_null) + materialized_column_not_null.get()) ->get_nested_column_ptr()); } diff 
--git a/be/src/vec/functions/function_date_or_datetime_computation.cpp b/be/src/vec/functions/function_date_or_datetime_computation.cpp index f6bf806ad46c1d..ece897d6dcbf7c 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation.cpp +++ b/be/src/vec/functions/function_date_or_datetime_computation.cpp @@ -55,7 +55,7 @@ using FunctionWeeksDiff = using FunctionHoursDiff = FunctionDateOrDateTimeComputation>; using FunctionMinutesDiff = - FunctionDateOrDateTimeComputation>; + FunctionDateOrDateTimeComputation>; using FunctionSecondsDiff = FunctionDateOrDateTimeComputation>; @@ -68,6 +68,7 @@ struct NowFunctionName { static constexpr auto name = "now"; }; +//TODO: remove the inter-layer CurrentDateTimeImpl using FunctionNow = FunctionCurrentDateOrDateTime>; using FunctionNowWithPrecision = diff --git a/be/src/vec/functions/function_date_or_datetime_computation.h b/be/src/vec/functions/function_date_or_datetime_computation.h index 330ea75cba96c8..8165f57881b839 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation.h +++ b/be/src/vec/functions/function_date_or_datetime_computation.h @@ -17,13 +17,12 @@ #pragma once -#include -#include - #include #include +#include #include #include +#include #include #include @@ -32,7 +31,6 @@ #include "common/exception.h" #include "common/logging.h" #include "common/status.h" -#include "fmt/format.h" #include "runtime/runtime_state.h" #include "udf/udf.h" #include "util/binary_cast.hpp" @@ -45,12 +43,10 @@ #include "vec/columns/columns_number.h" #include "vec/common/assert_cast.h" #include "vec/common/pod_array_fwd.h" -#include "vec/common/typeid_cast.h" #include "vec/core/block.h" #include "vec/core/column_numbers.h" #include "vec/core/column_with_type_and_name.h" #include "vec/core/columns_with_type_and_name.h" -#include "vec/core/field.h" #include "vec/core/types.h" #include "vec/data_types/data_type.h" #include "vec/data_types/data_type_date.h" @@ -67,73 +63,57 @@ namespace doris::vectorized { 
-template -extern ResultType date_time_add(const Arg& t, Int64 delta, bool& is_null) { - auto ts_value = binary_cast(t); +/// because all these functions(xxx_add/xxx_sub) defined in FE use Integer as the second value +/// so Int32 as delta is enough. For upstream(FunctionDateOrDateTimeComputation) we also could use Int32. + +template +ReturnNativeType date_time_add(const InputNativeType& t, Int32 delta, bool& is_null) { + using DateValueType = date_cast::TypeToValueTypeV; + using ResultDateValueType = date_cast::TypeToValueTypeV; + // e.g.: for DatatypeDatetimeV2, cast from u64 to DateV2Value + auto ts_value = binary_cast(t); TimeInterval interval(unit, delta, false); - if constexpr (std::is_same_v || - std::is_same_v) { + if constexpr (std::is_same_v) { is_null = !(ts_value.template date_add_interval(interval)); - - return binary_cast(ts_value); + // here DateValueType = ResultDateValueType + return binary_cast(ts_value); } else { + // this is for HOUR/MINUTE/SECOND/MS_ADD for datev2. got datetimev2 but not datev2. so need this two-arg reload to assign. 
ResultDateValueType res; is_null = !(ts_value.template date_add_interval(interval, res)); - return binary_cast(res); + return binary_cast(res); } } -#define ADD_TIME_FUNCTION_IMPL(CLASS, NAME, UNIT) \ - template \ - struct CLASS { \ - using ReturnType = std::conditional_t< \ - date_cast::IsV1(), DataTypeDateTime, \ - std::conditional_t< \ - std::is_same_v, \ - std::conditional_t, \ - DataTypeDateTimeV2>>; \ - using ReturnNativeType = \ - date_cast::ValueTypeOfColumnV>; \ - using InputNativeType = date_cast::ValueTypeOfColumnV>; \ - static constexpr auto name = #NAME; \ - static constexpr auto is_nullable = true; \ - static inline ReturnNativeType execute(const InputNativeType& t, Int64 delta, \ - bool& is_null) { \ - if constexpr (std::is_same_v || \ - std::is_same_v) { \ - return date_time_add(t, delta, \ - is_null); \ - } else if constexpr (std::is_same_v) { \ - if constexpr (TimeUnit::UNIT == TimeUnit::HOUR || \ - TimeUnit::UNIT == TimeUnit::MINUTE || \ - TimeUnit::UNIT == TimeUnit::SECOND || \ - TimeUnit::UNIT == TimeUnit::SECOND_MICROSECOND) { \ - return date_time_add, \ - DateV2Value, ReturnNativeType>( \ - t, delta, is_null); \ - } else { \ - return date_time_add, \ - DateV2Value, ReturnNativeType>(t, delta, \ - is_null); \ - } \ - \ - } else { \ - return date_time_add, \ - DateV2Value, ReturnNativeType>(t, delta, \ - is_null); \ - } \ - } \ - \ - static DataTypes get_variadic_argument_types() { \ - return {std::make_shared(), std::make_shared()}; \ - } \ +#define ADD_TIME_FUNCTION_IMPL(CLASS, NAME, UNIT) \ + template \ + struct CLASS { \ + /* for V1 type all return Datetime. 
for V2 type, if unit <= hour, increase to DatetimeV2 */ \ + using ReturnType = std::conditional_t< \ + date_cast::IsV1(), DataTypeDateTime, \ + std::conditional_t< \ + std::is_same_v, \ + std::conditional_t, \ + DataTypeDateTimeV2>>; \ + using ReturnNativeType = ReturnType::FieldType; \ + using InputNativeType = ArgType::FieldType; \ + static constexpr auto name = #NAME; \ + static constexpr auto is_nullable = true; \ + static inline ReturnNativeType execute(const InputNativeType& t, Int32 delta, \ + bool& is_null) { \ + return date_time_add(t, delta, is_null); \ + } \ + \ + static DataTypes get_variadic_argument_types() { \ + return {std::make_shared(), std::make_shared()}; \ + } \ } ADD_TIME_FUNCTION_IMPL(AddMicrosecondsImpl, microseconds_add, MICROSECOND); @@ -146,46 +126,32 @@ ADD_TIME_FUNCTION_IMPL(AddWeeksImpl, weeks_add, WEEK); ADD_TIME_FUNCTION_IMPL(AddMonthsImpl, months_add, MONTH); ADD_TIME_FUNCTION_IMPL(AddYearsImpl, years_add, YEAR); -template +template struct AddQuartersImpl { using ReturnType = - std::conditional_t || - std::is_same_v, + std::conditional_t || + std::is_same_v, DataTypeDateTime, - std::conditional_t, + std::conditional_t, DataTypeDateV2, DataTypeDateTimeV2>>; - using InputNativeType = std::conditional_t< - std::is_same_v || std::is_same_v, - Int64, std::conditional_t, UInt32, UInt64>>; - using ReturnNativeType = std::conditional_t< - std::is_same_v || std::is_same_v, - Int64, std::conditional_t, UInt32, UInt64>>; + using InputNativeType = ArgType::FieldType; + using ReturnNativeType = ReturnType::FieldType; static constexpr auto name = "quarters_add"; static constexpr auto is_nullable = true; - static inline ReturnNativeType execute(const InputNativeType& t, Int64 delta, bool& is_null) { - if constexpr (std::is_same_v || - std::is_same_v) { - return date_time_add(t, delta, is_null); - } else if constexpr (std::is_same_v) { - return date_time_add, - DateV2Value, ReturnNativeType>(t, delta, is_null); - } else { - return date_time_add, - 
DateV2Value, ReturnNativeType>(t, delta, - is_null); - } + static inline ReturnNativeType execute(const InputNativeType& t, Int32 delta, bool& is_null) { + return date_time_add(t, 3 * delta, is_null); } - static DataTypes get_variadic_argument_types() { return {std::make_shared()}; } + static DataTypes get_variadic_argument_types() { return {std::make_shared()}; } }; template struct SubtractIntervalImpl { using ReturnType = typename Transform::ReturnType; using InputNativeType = typename Transform::InputNativeType; + using ReturnNativeType = typename Transform::ReturnNativeType; static constexpr auto is_nullable = true; - static inline Int64 execute(const InputNativeType& t, Int64 delta, bool& is_null) { + static inline ReturnNativeType execute(const InputNativeType& t, Int32 delta, bool& is_null) { return Transform::execute(t, -delta, is_null); } @@ -244,57 +210,49 @@ struct SubtractYearsImpl : SubtractIntervalImpl, DateType static constexpr auto name = "years_sub"; }; -#define DECLARE_DATE_FUNCTIONS(NAME, FN_NAME, RETURN_TYPE, STMT) \ - template \ - struct NAME { \ - using ArgType1 = std::conditional_t< \ - std::is_same_v, UInt32, \ - std::conditional_t, UInt64, Int64>>; \ - using ArgType2 = std::conditional_t< \ - std::is_same_v, UInt32, \ - std::conditional_t, UInt64, Int64>>; \ - using DateValueType1 = std::conditional_t< \ - std::is_same_v, DateV2Value, \ - std::conditional_t, \ - DateV2Value, VecDateTimeValue>>; \ - using DateValueType2 = std::conditional_t< \ - std::is_same_v, DateV2Value, \ - std::conditional_t, \ - DateV2Value, VecDateTimeValue>>; \ - using ReturnType = RETURN_TYPE; \ - static constexpr auto name = #FN_NAME; \ - static constexpr auto is_nullable = false; \ - static inline ReturnType::FieldType execute(const ArgType1& t0, const ArgType2& t1, \ - bool& is_null) { \ - const auto& ts0 = reinterpret_cast(t0); \ - const auto& ts1 = reinterpret_cast(t1); \ - is_null = !ts0.is_valid_date() || !ts1.is_valid_date(); \ - return STMT; \ - } \ - 
static DataTypes get_variadic_argument_types() { \ - return {std::make_shared(), std::make_shared()}; \ - } \ +#define DECLARE_DATE_FUNCTIONS(NAME, FN_NAME, RETURN_TYPE, STMT) \ + template \ + struct NAME { \ + using NativeType1 = DateType1::FieldType; \ + using NativeType2 = DateType2::FieldType; \ + using DateValueType1 = date_cast::TypeToValueTypeV; \ + using DateValueType2 = date_cast::TypeToValueTypeV; \ + using ReturnType = RETURN_TYPE; \ + \ + static constexpr auto name = #FN_NAME; \ + static constexpr auto is_nullable = false; \ + static inline ReturnType::FieldType execute(const NativeType1& t0, const NativeType2& t1, \ + bool& is_null) { \ + const auto& ts0 = reinterpret_cast(t0); \ + const auto& ts1 = reinterpret_cast(t1); \ + is_null = !ts0.is_valid_date() || !ts1.is_valid_date(); \ + return (STMT); \ + } \ + static DataTypes get_variadic_argument_types() { \ + return {std::make_shared(), std::make_shared()}; \ + } \ }; + DECLARE_DATE_FUNCTIONS(DateDiffImpl, datediff, DataTypeInt32, (ts0.daynr() - ts1.daynr())); // DECLARE_DATE_FUNCTIONS(TimeDiffImpl, timediff, DataTypeTime, ts0.second_diff(ts1)); -// Expands to +// Expands to below here because it use Time type which need some special deal. template struct TimeDiffImpl { - using DateValueType1 = date_cast::TypeToValueTypeV; - using DateValueType2 = date_cast::TypeToValueTypeV; - using ArgType1 = date_cast::ValueTypeOfColumnV>; - using ArgType2 = date_cast::ValueTypeOfColumnV>; + using NativeType1 = date_cast::TypeToValueTypeV; + using NativeType2 = date_cast::TypeToValueTypeV; + using ArgType1 = DateType1::FieldType; + using ArgType2 = DateType2::FieldType; static constexpr bool UsingTimev2 = date_cast::IsV2() || date_cast::IsV2(); - using ReturnType = DataTypeTimeV2; + using ReturnType = DataTypeTimeV2; // TimeV1Type also use double as native type. same as v2. 
static constexpr auto name = "timediff"; static constexpr int64_t limit_value = 3020399000000; // 838:59:59 convert to microsecond static inline ReturnType::FieldType execute(const ArgType1& t0, const ArgType2& t1, bool& is_null) { - const auto& ts0 = reinterpret_cast(t0); - const auto& ts1 = reinterpret_cast(t1); + const auto& ts0 = reinterpret_cast(t0); + const auto& ts1 = reinterpret_cast(t1); is_null = !ts0.is_valid_date() || !ts1.is_valid_date(); if constexpr (UsingTimev2) { // refer to https://dev.mysql.com/doc/refman/5.7/en/time.html @@ -318,381 +276,138 @@ struct TimeDiffImpl { #define TIME_DIFF_FUNCTION_IMPL(CLASS, NAME, UNIT) \ DECLARE_DATE_FUNCTIONS(CLASS, NAME, DataTypeInt64, datetime_diff(ts1, ts0)) +// all these functions implemented by datediff TIME_DIFF_FUNCTION_IMPL(YearsDiffImpl, years_diff, YEAR); TIME_DIFF_FUNCTION_IMPL(MonthsDiffImpl, months_diff, MONTH); TIME_DIFF_FUNCTION_IMPL(WeeksDiffImpl, weeks_diff, WEEK); TIME_DIFF_FUNCTION_IMPL(DaysDiffImpl, days_diff, DAY); TIME_DIFF_FUNCTION_IMPL(HoursDiffImpl, hours_diff, HOUR); -TIME_DIFF_FUNCTION_IMPL(MintueSDiffImpl, minutes_diff, MINUTE); +TIME_DIFF_FUNCTION_IMPL(MintuesDiffImpl, minutes_diff, MINUTE); TIME_DIFF_FUNCTION_IMPL(SecondsDiffImpl, seconds_diff, SECOND); TIME_DIFF_FUNCTION_IMPL(MilliSecondsDiffImpl, milliseconds_diff, MILLISECOND); TIME_DIFF_FUNCTION_IMPL(MicroSecondsDiffImpl, microseconds_diff, MICROSECOND); -#define TIME_FUNCTION_TWO_ARGS_IMPL(CLASS, NAME, FUNCTION, RETURN_TYPE) \ - template \ - struct CLASS { \ - using ArgType = std::conditional_t< \ - std::is_same_v, UInt32, \ - std::conditional_t, UInt64, Int64>>; \ - using DateValueType = std::conditional_t< \ - std::is_same_v, DateV2Value, \ - std::conditional_t, \ - DateV2Value, VecDateTimeValue>>; \ - using ReturnType = RETURN_TYPE; \ - static constexpr auto name = #NAME; \ - static constexpr auto is_nullable = false; \ - static inline ReturnType::FieldType execute(const ArgType& t0, const Int32 mode, \ - bool& is_null) { \ - 
const auto& ts0 = reinterpret_cast(t0); \ - is_null = !ts0.is_valid_date(); \ - return ts0.FUNCTION; \ - } \ - static DataTypes get_variadic_argument_types() { \ - return {std::make_shared(), std::make_shared()}; \ - } \ +#define TIME_FUNCTION_TWO_ARGS_IMPL(CLASS, NAME, FUNCTION, RETURN_TYPE) \ + template \ + struct CLASS { \ + using ArgType = DateType::FieldType; \ + using DateValueType = date_cast::TypeToValueTypeV; \ + using ReturnType = RETURN_TYPE; \ + \ + static constexpr auto name = #NAME; \ + static constexpr auto is_nullable = false; \ + static inline ReturnType::FieldType execute(const ArgType& t0, const Int32 mode, \ + bool& is_null) { \ + const auto& ts0 = reinterpret_cast(t0); \ + is_null = !ts0.is_valid_date(); \ + return ts0.FUNCTION; \ + } \ + static DataTypes get_variadic_argument_types() { \ + return {std::make_shared(), std::make_shared()}; \ + } \ } TIME_FUNCTION_TWO_ARGS_IMPL(ToYearWeekTwoArgsImpl, yearweek, year_week(mysql_week_mode(mode)), DataTypeInt32); TIME_FUNCTION_TWO_ARGS_IMPL(ToWeekTwoArgsImpl, week, week(mysql_week_mode(mode)), DataTypeInt8); -template +// only use for FunctionDateOrDateTimeComputation. FromTypes are NativeTypes. +template struct DateTimeOp { - // use for (DateTime, DateTime) -> other_type - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to, NullMap& null_map) { - size_t size = vec_from0.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - // here reinterpret_cast is used to convert uint8& to bool&, - // otherwise it will be implicitly converted to bool, causing the rvalue to fail to match the lvalue. - // the same goes for the following. 
- vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], - reinterpret_cast(null_map[i])); - } - } - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to) { - size_t size = vec_from0.size(); - vec_to.resize(size); - - bool invalid = true; - for (size_t i = 0; i < size; ++i) { - // here reinterpret_cast is used to convert uint8& to bool&, - // otherwise it will be implicitly converted to bool, causing the rvalue to fail to match the lvalue. - // the same goes for the following. - vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], invalid); - - if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from0[i], vec_from1[i]); - } + using NativeType0 = DataType0::FieldType; + using NativeType1 = DataType1::FieldType; + using ValueType0 = date_cast::TypeToValueTypeV; + // arg1 maybe just delta value(e.g. DataTypeInt32, not datelike type) + constexpr static bool CastType1 = std::is_same_v || + std::is_same_v || + std::is_same_v || + std::is_same_v; + + static void throw_out_of_bound(NativeType0 arg0, NativeType1 arg1) { + auto value0 = binary_cast(arg0); + char buf0[40]; + char* end0 = value0.to_string(buf0); + if constexpr (CastType1) { + auto value1 = binary_cast>(arg1); + char buf1[40]; + char* end1 = value1.to_string(buf1); + throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} of {}, {} out of range", + Transform::name, std::string_view {buf0, end0 - 1}, + std::string_view {buf1, end1 - 1}); // minus 1 to skip /0 + } else { + throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} of {}, {} out of range", + Transform::name, std::string_view {buf0, end0 - 1}, arg1); } } - // use for (DateTime, int32) -> other_type - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to, NullMap& null_map) { - size_t size = vec_from0.size(); - vec_to.resize(size); - 
null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) - vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], - reinterpret_cast(null_map[i])); - } - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to) { + // execute on the null value's nested value may cause false positive exception, so use nullmaps to skip them. + static void vector_vector(const PaddedPODArray& vec_from0, + const PaddedPODArray& vec_from1, + PaddedPODArray& vec_to, const NullMap* nullmap0, + const NullMap* nullmap1) { size_t size = vec_from0.size(); vec_to.resize(size); + bool invalid = false; - bool invalid = true; for (size_t i = 0; i < size; ++i) { + if ((nullmap0 && (*nullmap0)[i]) || (nullmap1 && (*nullmap1)[i])) [[unlikely]] { + continue; + } vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], invalid); if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from0[i], vec_from1[i]); + throw_out_of_bound(vec_from0[i], vec_from1[i]); } } } - // use for (DateTime, const DateTime) -> other_type - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, NullMap& null_map, Int128& delta) { - size_t size = vec_from.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = - Transform::execute(vec_from[i], delta, reinterpret_cast(null_map[i])); + static void vector_constant(const PaddedPODArray& vec_from, + PaddedPODArray& vec_to, const NativeType1& delta, + const NullMap* nullmap0, const NullMap* nullmap1) { + if (nullmap1 && (*nullmap1)[0]) [[unlikely]] { + return; } - } - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, Int128& delta) { size_t size = vec_from.size(); vec_to.resize(size); + bool invalid = false; - bool invalid = true; for (size_t i = 0; i < size; ++i) { - vec_to[i] = 
Transform::execute(vec_from[i], delta, invalid); - - if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from[i], delta); + if (nullmap0 && (*nullmap0)[i]) [[unlikely]] { + continue; } - } - } - - // use for (DateTime, const ColumnNumber) -> other_type - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, NullMap& null_map, Int64 delta) { - size_t size = vec_from.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = - Transform::execute(vec_from[i], delta, reinterpret_cast(null_map[i])); - } - } - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, Int64 delta) { - size_t size = vec_from.size(); - vec_to.resize(size); - bool invalid = true; - - for (size_t i = 0; i < size; ++i) { vec_to[i] = Transform::execute(vec_from[i], delta, invalid); if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from[i], delta); + throw_out_of_bound(vec_from[i], delta); } } } - // use for (const DateTime, ColumnNumber) -> other_type - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - NullMap& null_map, const IColumn& delta) { - size_t size = delta.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = Transform::execute(from, delta.get_int(i), - reinterpret_cast(null_map[i])); + static void constant_vector(const NativeType0& from, PaddedPODArray& vec_to, + const PaddedPODArray& delta, const NullMap* nullmap0, + const NullMap* nullmap1) { + if (nullmap0 && (*nullmap0)[0]) [[unlikely]] { + return; } - } - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - const IColumn& delta) { size_t size = delta.size(); vec_to.resize(size); - bool invalid = true; + bool invalid = false; for (size_t i = 0; 
i < size; ++i) { - vec_to[i] = Transform::execute(from, delta.get_int(i), invalid); - - if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, from, delta.get_int(i)); + if (nullmap1 && (*nullmap1)[i]) [[unlikely]] { + continue; } - } - } - - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - NullMap& null_map, const PaddedPODArray& delta) { - size_t size = delta.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = Transform::execute(from, delta[i], reinterpret_cast(null_map[i])); - } - } - - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - const PaddedPODArray& delta) { - size_t size = delta.size(); - vec_to.resize(size); - bool invalid = true; - - for (size_t i = 0; i < size; ++i) { vec_to[i] = Transform::execute(from, delta[i], invalid); if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, from, delta[i]); - } - } - } -}; - -template -struct DateTimeAddIntervalImpl { - static Status execute(Block& block, const ColumnNumbers& arguments, uint32_t result, - size_t input_rows_count) { - using ToType = typename Transform::ReturnType::FieldType; - using Op = DateTimeOp; - - const ColumnPtr source_col = remove_nullable(block.get_by_position(arguments[0]).column); - const auto is_nullable = block.get_by_position(result).type->is_nullable(); - if (const auto* sources = check_and_get_column>(source_col.get())) { - auto col_to = ColumnVector::create(); - auto delta_column_ptr = remove_nullable(block.get_by_position(arguments[1]).column); - const IColumn& delta_column = *delta_column_ptr; - - if (is_nullable) { - auto null_map = ColumnUInt8::create(input_rows_count, 0); - if (const auto* delta_const_column = - typeid_cast(&delta_column)) { - if (delta_const_column->get_field().get_type() == Field::Types::Int128) { - 
Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == Field::Types::Int64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == Field::Types::UInt64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } else { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } - } else { - if (const auto* delta_vec_column0 = - check_and_get_column>(delta_column)) { - Op::vector_vector(sources->get_data(), delta_vec_column0->get_data(), - col_to->get_data(), null_map->get_data()); - } else { - const auto* delta_vec_column1 = - check_and_get_column>(delta_column); - DCHECK(delta_vec_column1 != nullptr); - Op::vector_vector(sources->get_data(), delta_vec_column1->get_data(), - col_to->get_data(), null_map->get_data()); - } - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[0]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[1]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - block.get_by_position(result).column = - ColumnNullable::create(std::move(col_to), std::move(null_map)); - } else { - if (const auto* delta_const_column = - 
typeid_cast(&delta_column)) { - if (delta_const_column->get_field().get_type() == Field::Types::Int128) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == Field::Types::Int64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == Field::Types::UInt64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } else { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } - } else { - if (const auto* delta_vec_column0 = - check_and_get_column>(delta_column)) { - Op::vector_vector(sources->get_data(), delta_vec_column0->get_data(), - col_to->get_data()); - } else { - const auto* delta_vec_column1 = - check_and_get_column>(delta_column); - DCHECK(delta_vec_column1 != nullptr); - Op::vector_vector(sources->get_data(), delta_vec_column1->get_data(), - col_to->get_data()); - } - } - block.replace_by_position(result, std::move(col_to)); + throw_out_of_bound(from, delta[i]); } - } else if (const auto* sources_const = - check_and_get_column_const>(source_col.get())) { - auto col_to = ColumnVector::create(); - if (is_nullable) { - auto null_map = ColumnUInt8::create(input_rows_count, 0); - auto not_nullable_column_ptr_arg1 = - remove_nullable(block.get_by_position(arguments[1]).column); - if (const auto* delta_vec_column = check_and_get_column>( - *not_nullable_column_ptr_arg1)) { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), null_map->get_data(), - delta_vec_column->get_data()); - } else { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), null_map->get_data(), - *not_nullable_column_ptr_arg1); - } - if (const auto* nullable_col = check_and_get_column( - 
block.get_by_position(arguments[0]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[1]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - block.get_by_position(result).column = - ColumnNullable::create(std::move(col_to), std::move(null_map)); - } else { - if (const auto* delta_vec_column = check_and_get_column>( - *block.get_by_position(arguments[1]).column)) { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), delta_vec_column->get_data()); - } else { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), - *block.get_by_position(arguments[1]).column); - } - block.replace_by_position(result, std::move(col_to)); - } - } else { - return Status::RuntimeError( - "Illegal column {} of first argument and type {} of function {}", - block.get_by_position(arguments[0]).column->get_name(), - block.get_by_position(arguments[0]).type->get_name(), Transform::name); } - return Status::OK(); } }; +// Used for date(time) add/sub date(time)/integer. the input types are variadic and dispatch in execute. 
the return type is +// decided by Transform template class FunctionDateOrDateTimeComputation : public IFunction { public: @@ -708,41 +423,14 @@ class FunctionDateOrDateTimeComputation : public IFunction { size_t get_number_of_arguments() const override { return 0; } DataTypes get_variadic_argument_types_impl() const override { - if constexpr (has_variadic_argument) return Transform::get_variadic_argument_types(); + if constexpr (has_variadic_argument) { + return Transform::get_variadic_argument_types(); + } return {}; } bool use_default_implementation_for_nulls() const override { return false; } DataTypePtr get_return_type_impl(const ColumnsWithTypeAndName& arguments) const override { - if (arguments.size() != 2 && arguments.size() != 3) { - throw doris::Exception(ErrorCode::INVALID_ARGUMENT, - "Number of arguments for function {} doesn't match: passed {} , " - "should be 2 or 3", - get_name(), arguments.size()); - } - - if (arguments.size() == 2) { - if (!is_date_or_datetime(remove_nullable(arguments[0].type)) && - !is_date_v2_or_datetime_v2(remove_nullable(arguments[0].type))) { - throw doris::Exception( - ErrorCode::INVALID_ARGUMENT, - "Illegal type {} of argument of function {}. Should be a date or a date " - "with time", - arguments[0].type->get_name(), get_name()); - } - } else { - if (!WhichDataType(remove_nullable(arguments[0].type)).is_date_time() || - !WhichDataType(remove_nullable(arguments[0].type)).is_date_time_v2() || - !WhichDataType(remove_nullable(arguments[2].type)).is_string()) { - throw doris::Exception( - ErrorCode::INVALID_ARGUMENT, - "Function {} supports 2 or 3 arguments. The 1st argument must be of type " - "Date or DateTime. The 2nd argument must be number. The 3rd argument " - "(optional) must be a constant string with timezone name. 
The timezone " - "argument is allowed only when the 1st argument has the type DateTime", - get_name()); - } - } RETURN_REAL_TYPE_FOR_DATEV2_FUNCTION(typename Transform::ReturnType); } @@ -753,48 +441,164 @@ class FunctionDateOrDateTimeComputation : public IFunction { WhichDataType which1(remove_nullable(first_arg_type)); WhichDataType which2(remove_nullable(second_arg_type)); + /// now dispatch with the two arguments' type. no need to consider return type because the same arguments decide a + /// unique return type which could be extracted from Transform. + + // for all `xxx_add/sub`, the second arg is int32. + // for `week/yearweek`, if it has the second arg, it's int32. + // in these situations, the first would be any datelike type. + if (which2.is_int32()) { + switch (which1.idx) { + case TypeIndex::Date: + return execute_inner(block, arguments, result, + input_rows_count); + break; + case TypeIndex::DateTime: + return execute_inner(block, arguments, result, + input_rows_count); + break; + case TypeIndex::DateV2: + return execute_inner(block, arguments, result, + input_rows_count); + break; + case TypeIndex::DateTimeV2: + return execute_inner(block, arguments, result, + input_rows_count); + break; + default: + return Status::InternalError("Illegal argument {} and {} of function {}", + block.get_by_position(arguments[0]).type->get_name(), + block.get_by_position(arguments[1]).type->get_name(), + get_name()); + } + } + // then consider datelike - datelike. everything is possible here as well. + // for `xxx_diff`, every combination of V2 is possible. 
but for V1 we only support Datetime - Datetime if (which1.is_date_v2() && which2.is_date_v2()) { - return DateTimeAddIntervalImpl::execute(block, arguments, - result, - input_rows_count); + return execute_inner(block, arguments, result, + input_rows_count); } else if (which1.is_date_time_v2() && which2.is_date_time_v2()) { - return DateTimeAddIntervalImpl< - DataTypeDateTimeV2::FieldType, Transform, - DataTypeDateTimeV2::FieldType>::execute(block, arguments, result, - input_rows_count); - } else if (which1.is_date_time() && which2.is_date_time()) { - return DateTimeAddIntervalImpl::execute(block, arguments, - result, - input_rows_count); + return execute_inner(block, arguments, result, + input_rows_count); } else if (which1.is_date_v2() && which2.is_date_time_v2()) { - return DateTimeAddIntervalImpl< - DataTypeDateV2::FieldType, Transform, - DataTypeDateTimeV2::FieldType>::execute(block, arguments, result, - input_rows_count); + return execute_inner(block, arguments, result, + input_rows_count); } else if (which1.is_date_time_v2() && which2.is_date_v2()) { - return DateTimeAddIntervalImpl::execute(block, arguments, - result, - input_rows_count); - } else if (which1.is_date()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else if (which1.is_date_time()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else if (which1.is_date_v2()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else if (which1.is_date_time_v2()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else { - return Status::RuntimeError("Illegal type {} of argument of function {}", - block.get_by_position(arguments[0]).type->get_name(), - get_name()); + return execute_inner(block, arguments, result, + input_rows_count); + } else if (which1.is_date_time() && which2.is_date_time()) { + return execute_inner(block, 
arguments, result, + input_rows_count); } + return Status::InternalError("Illegal argument {} and {} of function {}", + block.get_by_position(arguments[0]).type->get_name(), + block.get_by_position(arguments[1]).type->get_name(), + get_name()); + } + + template + static Status execute_inner(Block& block, const ColumnNumbers& arguments, uint32_t result, + size_t input_rows_count) { + using NativeType0 = DataType0::FieldType; + using NativeType1 = DataType1::FieldType; + using ResFieldType = typename Transform::ReturnType::FieldType; + using Op = DateTimeOp; + + auto get_null_map = [](const ColumnPtr& col) -> const NullMap* { + if (col->is_nullable()) { + return &static_cast(*col).get_null_map_data(); + } + // Const(Nullable) + if (const auto* const_col = check_and_get_column(col.get()); + const_col != nullptr && const_col->is_concrete_nullable()) { + return &static_cast(const_col->get_data_column()) + .get_null_map_data(); + } + return nullptr; + }; + + //ATTN: those null maps may be nullmap of ColumnConst(only 1 row) + // src column is always datelike type. 
+ ColumnPtr& col0 = block.get_by_position(arguments[0]).column; + const NullMap* nullmap0 = get_null_map(col0); + // the second column may be delta column(xx_add/sub) or datelike column(xxx_diff) + ColumnPtr& col1 = block.get_by_position(arguments[1]).column; + const NullMap* nullmap1 = get_null_map(col1); + + // if null wrapped, extract nested column as src_nested_col + const ColumnPtr src_nested_col = remove_nullable(col0); + const auto result_nullable = block.get_by_position(result).type->is_nullable(); + auto res_col = ColumnVector::create(); + + // vector-const or vector-vector + if (const auto* sources = + check_and_get_column>(src_nested_col.get())) { + const ColumnPtr nest_col1 = remove_nullable(col1); + bool rconst = false; + // vector-const + if (const auto* nest_col1_const = check_and_get_column(*nest_col1)) { + rconst = true; + const auto col1_inside_const = assert_cast&>( + nest_col1_const->get_data_column()); + Op::vector_constant(sources->get_data(), res_col->get_data(), + col1_inside_const.get_data()[0], nullmap0, nullmap1); + } else { // vector-vector + const auto concrete_col1 = + assert_cast&>(*nest_col1); + Op::vector_vector(sources->get_data(), concrete_col1.get_data(), + res_col->get_data(), nullmap0, nullmap1); + } + + // update result nullmap with inputs + if (result_nullable) { + auto null_map = ColumnBool::create(input_rows_count, 0); + NullMap& result_null_map = assert_cast(*null_map).get_data(); + if (nullmap0) { + VectorizedUtils::update_null_map(result_null_map, *nullmap0); + } + if (nullmap1) { + VectorizedUtils::update_null_map(result_null_map, *nullmap1, rconst); + } + block.get_by_position(result).column = + ColumnNullable::create(std::move(res_col), std::move(null_map)); + } else { + block.replace_by_position(result, std::move(res_col)); + } + } else if (const auto* sources_const = + check_and_get_column_const>( + src_nested_col.get())) { + // const-vector + const auto col0_inside_const = + 
assert_cast&>(sources_const->get_data_column()); + const ColumnPtr nested_col1 = remove_nullable(col1); + const auto concrete_col1 = assert_cast&>(*nested_col1); + Op::constant_vector(col0_inside_const.get_data()[0], res_col->get_data(), + concrete_col1.get_data(), nullmap0, nullmap1); + + // update result nullmap with inputs + if (result_nullable) { + auto null_map = ColumnBool::create(input_rows_count, 0); + NullMap& result_null_map = assert_cast(*null_map).get_data(); + if (nullmap0) { + VectorizedUtils::update_null_map(result_null_map, *nullmap0, true); + } + if (nullmap1) { // no const-const here. default impl deal it. + VectorizedUtils::update_null_map(result_null_map, *nullmap1); + } + block.get_by_position(result).column = + ColumnNullable::create(std::move(res_col), std::move(null_map)); + } else { + block.replace_by_position(result, std::move(res_col)); + } + } else { // no const-const here. default impl deal it. + return Status::InternalError( + "Illegel columns for function {}:\n1: {} with type {}\n2: {} with type {}", + Transform::name, block.get_by_position(arguments[0]).name, + block.get_by_position(arguments[0]).type->get_name(), + block.get_by_position(arguments[1]).name, + block.get_by_position(arguments[1]).type->get_name()); + } + return Status::OK(); } }; @@ -878,7 +682,7 @@ struct CurrentDateTimeImpl { bool use_const; if constexpr (WithPrecision) { if (const auto* const_column = check_and_get_column( - block.get_by_position(arguments[0]).column)) { + block.get_by_position(arguments[0]).column.get())) { int64_t scale = const_column->get_int(0); dtv.from_unixtime(context->state()->timestamp_ms() / 1000, context->state()->nano_seconds(), @@ -892,7 +696,7 @@ struct CurrentDateTimeImpl { use_const = true; } else if (const auto* nullable_column = check_and_get_column( - block.get_by_position(arguments[0]).column)) { + block.get_by_position(arguments[0]).column.get())) { const auto& null_map = nullable_column->get_null_map_data(); const auto& 
nested_column = assert_cast( nullable_column->get_nested_column_ptr().get()); @@ -1170,7 +974,9 @@ class CurrentDateFunctionBuilder : public FunctionBuilderImpl { FunctionBasePtr build_impl(const ColumnsWithTypeAndName& arguments, const DataTypePtr& return_type) const override { DataTypes data_types(arguments.size()); - for (size_t i = 0; i < arguments.size(); ++i) data_types[i] = arguments[i].type; + for (size_t i = 0; i < arguments.size(); ++i) { + data_types[i] = arguments[i].type; + } if (is_date_v2(return_type)) { auto function = FunctionCurrentDateOrDateTime< CurrentDateImpl>::create(); diff --git a/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp b/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp index ec9560456c131a..db43bf1818d38f 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp +++ b/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp @@ -95,14 +95,14 @@ using FunctionDatetimeV2SubYears = FUNCTION_DATEV2_WITH_TWO_ARGS(NAME, IMPL, DataTypeDateTimeV2, DataTypeDateV2) \ FUNCTION_DATEV2_WITH_TWO_ARGS(NAME, IMPL, DataTypeDateV2, DataTypeDateTimeV2) \ FUNCTION_DATEV2_WITH_TWO_ARGS(NAME, IMPL, DataTypeDateV2, DataTypeDateV2) - +// these diff functions accept all v2 types. but for v1 only datetime. 
ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2DateDiff, DateDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2TimeDiff, TimeDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2YearsDiff, YearsDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MonthsDiff, MonthsDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2WeeksDiff, WeeksDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2HoursDiff, HoursDiffImpl) -ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MinutesDiff, MintueSDiffImpl) +ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MinutesDiff, MintuesDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2SecondsDiff, SecondsDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2DaysDiff, DaysDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MilliSecondsDiff, MilliSecondsDiffImpl) diff --git a/be/src/vec/functions/function_helpers.h b/be/src/vec/functions/function_helpers.h index 8c7eec28fe2f6f..818badeee4551b 100644 --- a/be/src/vec/functions/function_helpers.h +++ b/be/src/vec/functions/function_helpers.h @@ -20,10 +20,8 @@ #pragma once -#include - +#include #include -#include #include "vec/columns/column.h" #include "vec/columns/column_const.h" @@ -53,11 +51,15 @@ const Type* check_and_get_data_type(const IDataType* data_type) { template const ColumnConst* check_and_get_column_const(const IColumn* column) { - if (!column || !is_column_const(*column)) return {}; + if (!column || !is_column_const(*column)) { + return nullptr; + } - const ColumnConst* res = assert_cast(column); + const auto* res = assert_cast(column); - if (!check_column(&res->get_data_column())) return {}; + if (!check_column(&res->get_data_column())) { + return nullptr; + } return res; } @@ -66,7 +68,9 @@ template const Type* check_and_get_column_constData(const IColumn* column) { const ColumnConst* res = check_and_get_column_const(column); - if (!res) return {}; + if (!res) { + return nullptr; + } return 
static_cast(&res->get_data_column()); } diff --git a/be/src/vec/functions/function_ip.h b/be/src/vec/functions/function_ip.h index 67edad5015aeaf..9f2f4dc28868b4 100644 --- a/be/src/vec/functions/function_ip.h +++ b/be/src/vec/functions/function_ip.h @@ -615,8 +615,13 @@ class FunctionIsIPAddressInRange : public IFunction { for (size_t i = 0; i < input_rows_count; ++i) { auto addr_idx = index_check_const(i, addr_const); auto cidr_idx = index_check_const(i, cidr_const); - const auto cidr = - parse_ip_with_cidr(str_cidr_column->get_data_at(cidr_idx).to_string_view()); + auto cidr_data = str_cidr_column->get_data_at(cidr_idx); + // cidr_data maybe NULL, But the input column is nested column, so check here avoid throw exception + if (cidr_data.data == nullptr || cidr_data.size == 0) { + col_res_data[i] = 0; + continue; + } + const auto cidr = parse_ip_with_cidr(cidr_data.to_string_view()); if constexpr (PT == PrimitiveType::TYPE_IPV4) { if (cidr._address.as_v4()) { col_res_data[i] = match_ipv4_subnet(ip_data[addr_idx], cidr._address.as_v4(), @@ -763,11 +768,13 @@ class FunctionIsIPAddressInRange : public IFunction { if (is_ipv4(addr_column_with_type_and_name.type)) { execute_impl_with_ip( input_rows_count, addr_const, cidr_const, - assert_cast(cidr_column.get()), addr_column, col_res); + assert_cast(cidr_column.get()), addr_column, + col_res.get()); } else if (is_ipv6(addr_column_with_type_and_name.type)) { execute_impl_with_ip( input_rows_count, addr_const, cidr_const, - assert_cast(cidr_column.get()), addr_column, col_res); + assert_cast(cidr_column.get()), addr_column, + col_res.get()); } else { const auto* str_addr_column = assert_cast(addr_column.get()); const auto* str_cidr_column = assert_cast(cidr_column.get()); @@ -775,11 +782,15 @@ class FunctionIsIPAddressInRange : public IFunction { for (size_t i = 0; i < input_rows_count; ++i) { auto addr_idx = index_check_const(i, addr_const); auto cidr_idx = index_check_const(i, cidr_const); - - const auto addr = - 
IPAddressVariant(str_addr_column->get_data_at(addr_idx).to_string_view()); - const auto cidr = - parse_ip_with_cidr(str_cidr_column->get_data_at(cidr_idx).to_string_view()); + auto addr_data = str_addr_column->get_data_at(addr_idx); + auto cidr_data = str_cidr_column->get_data_at(cidr_idx); + // cidr_data maybe NULL, But the input column is nested column, so check here avoid throw exception + if (cidr_data.data == nullptr || cidr_data.size == 0) { + col_res_data[i] = 0; + continue; + } + const auto addr = IPAddressVariant(addr_data.to_string_view()); + const auto cidr = parse_ip_with_cidr(cidr_data.to_string_view()); col_res_data[i] = is_address_in_range(addr, cidr) ? 1 : 0; } } diff --git a/be/src/vec/functions/function_jsonb.cpp b/be/src/vec/functions/function_jsonb.cpp index 463508169aadc6..dcae26f3c2f844 100644 --- a/be/src/vec/functions/function_jsonb.cpp +++ b/be/src/vec/functions/function_jsonb.cpp @@ -459,11 +459,12 @@ class FunctionJsonbKeys : public IFunction { // prepare jsonb data column jsonb_data_column = unpack_if_const(block.get_by_position(arguments[0]).column).first; if (block.get_by_position(arguments[0]).column->is_nullable()) { - const auto* nullable = check_and_get_column(jsonb_data_column); + const auto* nullable = check_and_get_column(jsonb_data_column.get()); jsonb_data_column = nullable->get_nested_column_ptr(); data_null_map = &nullable->get_null_map_data(); } - const ColumnString* col_from_string = check_and_get_column(jsonb_data_column); + const ColumnString* col_from_string = + check_and_get_column(jsonb_data_column.get()); // prepare parse path column prepare, maybe we do not have path column ColumnPtr jsonb_path_column = nullptr; @@ -475,11 +476,12 @@ class FunctionJsonbKeys : public IFunction { std::tie(jsonb_path_column, path_const) = unpack_if_const(block.get_by_position(arguments[1]).column); if (block.get_by_position(arguments[1]).column->is_nullable()) { - const auto* nullable = check_and_get_column(jsonb_path_column); + const 
auto* nullable = + check_and_get_column(jsonb_path_column.get()); jsonb_path_column = nullable->get_nested_column_ptr(); path_null_map = &nullable->get_null_map_data(); } - jsonb_path_col = check_and_get_column(jsonb_path_column); + jsonb_path_col = check_and_get_column(jsonb_path_column.get()); } auto null_map = ColumnUInt8::create(input_rows_count, 0); @@ -1844,9 +1846,10 @@ class FunctionJsonSearch : public IFunction { // prepare jsonb data column std::tie(col_json, json_is_const) = unpack_if_const(block.get_by_position(arguments[0]).column); - const ColumnString* col_json_string = check_and_get_column(col_json); - if (auto* nullable = check_and_get_column(col_json)) { - col_json_string = check_and_get_column(nullable->get_nested_column_ptr()); + const ColumnString* col_json_string = check_and_get_column(col_json.get()); + if (auto* nullable = check_and_get_column(col_json.get())) { + col_json_string = + check_and_get_column(nullable->get_nested_column_ptr().get()); } if (!col_json_string) { @@ -1873,8 +1876,8 @@ class FunctionJsonSearch : public IFunction { // prepare jsonb data column std::tie(col_one, one_is_const) = unpack_if_const(block.get_by_position(arguments[1]).column); - const ColumnString* col_one_string = check_and_get_column(col_one); - if (auto* nullable = check_and_get_column(col_one)) { + const ColumnString* col_one_string = check_and_get_column(col_one.get()); + if (auto* nullable = check_and_get_column(col_one.get())) { col_one_string = check_and_get_column(*nullable->get_nested_column_ptr()); } if (!col_one_string) { @@ -1921,8 +1924,9 @@ class FunctionJsonSearch : public IFunction { std::tie(col_search, search_is_const) = unpack_if_const(block.get_by_position(arguments[2]).column); - const ColumnString* col_search_string = check_and_get_column(col_search); - if (auto* nullable = check_and_get_column(col_search)) { + const ColumnString* col_search_string = + check_and_get_column(col_search.get()); + if (auto* nullable = 
check_and_get_column(col_search.get())) { col_search_string = check_and_get_column(*nullable->get_nested_column_ptr()); } diff --git a/be/src/vec/functions/function_nullables.cpp b/be/src/vec/functions/function_nullables.cpp index 91bce24f48fc8b..b1e72ff52a71f4 100644 --- a/be/src/vec/functions/function_nullables.cpp +++ b/be/src/vec/functions/function_nullables.cpp @@ -54,7 +54,8 @@ class FunctionNullable : public IFunction { Status execute_impl(FunctionContext* context, Block& block, const ColumnNumbers& arguments, uint32_t result, size_t input_rows_count) const override { ColumnPtr& col = block.get_by_position(arguments[0]).column; - if (const auto* col_null = check_and_get_column(col); col_null == nullptr) { + if (const auto* col_null = check_and_get_column(col.get()); + col_null == nullptr) { // not null block.replace_by_position( result, ColumnNullable::create(col, ColumnBool::create(input_rows_count, 0))); @@ -85,7 +86,7 @@ class FunctionNonNullable : public IFunction { Status execute_impl(FunctionContext* context, Block& block, const ColumnNumbers& arguments, uint32_t result, size_t input_rows_count) const override { auto& data = block.get_by_position(arguments[0]); - if (const auto* col_null = check_and_get_column(data.column); + if (const auto* col_null = check_and_get_column(data.column.get()); col_null == nullptr) // raise error if input is not nullable. 
{ return Status::InvalidArgument( diff --git a/be/src/vec/functions/function_quantile_state.cpp b/be/src/vec/functions/function_quantile_state.cpp index 95afbf1db32d23..8f8740841c5407 100644 --- a/be/src/vec/functions/function_quantile_state.cpp +++ b/be/src/vec/functions/function_quantile_state.cpp @@ -130,7 +130,7 @@ class FunctionToQuantileState : public IFunction { const ColumnPtr& column = block.get_by_position(arguments[0]).column; const DataTypePtr& data_type = block.get_by_position(arguments[0]).type; auto compression_arg = check_and_get_column_const( - block.get_by_position(arguments.back()).column); + block.get_by_position(arguments.back()).column.get()); float compression = 2048; if (compression_arg) { auto compression_arg_val = compression_arg->get_value(); @@ -189,7 +189,7 @@ class FunctionQuantileStatePercent : public IFunction { auto str_col = assert_cast(column.get()); auto& col_data = str_col->get_data(); auto percent_arg = check_and_get_column_const( - block.get_by_position(arguments.back()).column); + block.get_by_position(arguments.back()).column.get()); if (!percent_arg) { return Status::InternalError( diff --git a/be/src/vec/functions/function_string.h b/be/src/vec/functions/function_string.h index 14926e1062c020..a729af5948a73f 100644 --- a/be/src/vec/functions/function_string.h +++ b/be/src/vec/functions/function_string.h @@ -2142,7 +2142,7 @@ class FunctionSplitByString : public IFunction { NullMapType* dest_nested_null_map = nullptr; auto* dest_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nullable_col->get_nested_column_ptr().get(); dest_nested_null_map = &dest_nullable_col->get_null_map_column().get_data(); const auto* col_left = check_and_get_column(src_column.get()); @@ -4436,7 +4436,7 @@ class FunctionTranslate : public IFunction { } else if (is_ascii) { impl_vectors = impl_vectors_ascii; } - impl_vectors(col_source, col_from, col_to, 
col_res); + impl_vectors(col_source, col_from, col_to, col_res.get()); block.get_by_position(result).column = std::move(col_res); return Status::OK(); } diff --git a/be/src/vec/functions/function_tokenize.cpp b/be/src/vec/functions/function_tokenize.cpp index 0bcd31af40dac7..f0a7c3b68aec49 100644 --- a/be/src/vec/functions/function_tokenize.cpp +++ b/be/src/vec/functions/function_tokenize.cpp @@ -129,7 +129,7 @@ Status FunctionTokenize::execute_impl(FunctionContext* /*context*/, Block& block NullMapType* dest_nested_null_map = nullptr; ColumnNullable* dest_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nullable_col->get_nested_column_ptr().get(); dest_nested_null_map = &dest_nullable_col->get_null_map_column().get_data(); if (auto col_left = check_and_get_column(src_column.get())) { diff --git a/be/src/vec/functions/functions_geo.cpp b/be/src/vec/functions/functions_geo.cpp index 6d75258d146ff7..0a752af18fe04c 100644 --- a/be/src/vec/functions/functions_geo.cpp +++ b/be/src/vec/functions/functions_geo.cpp @@ -258,10 +258,10 @@ struct StDistanceSphere { ColumnPtr y_lat_origin = block.get_by_position(arguments[3]).column->convert_to_full_column_if_const(); - const auto* x_lng = check_and_get_column(x_lng_origin); - const auto* x_lat = check_and_get_column(x_lat_origin); - const auto* y_lng = check_and_get_column(y_lng_origin); - const auto* y_lat = check_and_get_column(y_lat_origin); + const auto* x_lng = check_and_get_column(x_lng_origin.get()); + const auto* x_lat = check_and_get_column(x_lat_origin.get()); + const auto* y_lng = check_and_get_column(y_lng_origin.get()); + const auto* y_lat = check_and_get_column(y_lat_origin.get()); CHECK(x_lng && x_lat && y_lng && y_lat); const auto size = x_lng->size(); @@ -305,10 +305,10 @@ struct StAngleSphere { ColumnPtr y_lat_origin = block.get_by_position(arguments[3]).column->convert_to_full_column_if_const(); - const auto* 
x_lng = check_and_get_column(x_lng_origin); - const auto* x_lat = check_and_get_column(x_lat_origin); - const auto* y_lng = check_and_get_column(y_lng_origin); - const auto* y_lat = check_and_get_column(y_lat_origin); + const auto* x_lng = check_and_get_column(x_lng_origin.get()); + const auto* x_lat = check_and_get_column(x_lat_origin.get()); + const auto* y_lng = check_and_get_column(y_lng_origin.get()); + const auto* y_lat = check_and_get_column(y_lat_origin.get()); CHECK(x_lng && x_lat && y_lng && y_lat); const auto size = x_lng->size(); diff --git a/be/src/vec/functions/functions_logical.cpp b/be/src/vec/functions/functions_logical.cpp index 0f474851f032ee..f99f0447725edd 100644 --- a/be/src/vec/functions/functions_logical.cpp +++ b/be/src/vec/functions/functions_logical.cpp @@ -141,11 +141,11 @@ void basic_execute_impl(ColumnRawPtrs arguments, ColumnWithTypeAndName& result_i size_t input_rows_count) { auto col_res = ColumnUInt8::create(input_rows_count); if (auto l = check_and_get_column(arguments[0])) { - vector_const(arguments[1], l, col_res, input_rows_count); + vector_const(arguments[1], l, col_res.get(), input_rows_count); } else if (auto r = check_and_get_column(arguments[1])) { - vector_const(arguments[0], r, col_res, input_rows_count); + vector_const(arguments[0], r, col_res.get(), input_rows_count); } else { - vector_vector(arguments[0], arguments[1], col_res, input_rows_count); + vector_vector(arguments[0], arguments[1], col_res.get(), input_rows_count); } result_info.column = std::move(col_res); } @@ -156,11 +156,12 @@ void null_execute_impl(ColumnRawPtrs arguments, ColumnWithTypeAndName& result_in auto col_nulls = ColumnUInt8::create(input_rows_count); auto col_res = ColumnUInt8::create(input_rows_count); if (auto l = check_and_get_column(arguments[0])) { - vector_const_null(arguments[1], l, col_res, col_nulls, input_rows_count); + vector_const_null(arguments[1], l, col_res.get(), col_nulls.get(), input_rows_count); } else if (auto r = 
check_and_get_column(arguments[1])) { - vector_const_null(arguments[0], r, col_res, col_nulls, input_rows_count); + vector_const_null(arguments[0], r, col_res.get(), col_nulls.get(), input_rows_count); } else { - vector_vector_null(arguments[0], arguments[1], col_res, col_nulls, input_rows_count); + vector_vector_null(arguments[0], arguments[1], col_res.get(), col_nulls.get(), + input_rows_count); } result_info.column = ColumnNullable::create(std::move(col_res), std::move(col_nulls)); } diff --git a/be/src/vec/functions/in.h b/be/src/vec/functions/in.h index 5d590190182801..6f697ba7441df5 100644 --- a/be/src/vec/functions/in.h +++ b/be/src/vec/functions/in.h @@ -216,7 +216,7 @@ class FunctionIn : public IFunction { if (materialized_column->is_nullable()) { const auto* null_col_ptr = vectorized::check_and_get_column( - materialized_column); + materialized_column.get()); const auto& null_map = assert_cast( null_col_ptr->get_null_map_column()) .get_data(); diff --git a/be/src/vec/functions/least_greast.cpp b/be/src/vec/functions/least_greast.cpp index 7d1953f7041174..9ad53c4f531529 100644 --- a/be/src/vec/functions/least_greast.cpp +++ b/be/src/vec/functions/least_greast.cpp @@ -173,7 +173,7 @@ struct FunctionFieldImpl { size_t input_rows_count) { const auto& data_type = block.get_by_position(arguments[0]).type; auto result_column = ColumnInt32::create(input_rows_count, 0); - auto& res_data = static_cast(result_column)->get_data(); + auto& res_data = static_cast(result_column.get())->get_data(); const auto& column_size = arguments.size(); std::vector argument_columns(column_size); diff --git a/be/src/vec/functions/round.h b/be/src/vec/functions/round.h index 3f4f9c60fcbe3d..3b821f0aa528a4 100644 --- a/be/src/vec/functions/round.h +++ b/be/src/vec/functions/round.h @@ -731,6 +731,7 @@ class FunctionRounding : public IFunction { const auto* col_general = is_col_general_const ? 
assert_cast(*column_general.column) .get_data_column_ptr() + .get() : column_general.column.get(); ColumnPtr res; diff --git a/be/src/vec/sink/vtablet_block_convertor.cpp b/be/src/vec/sink/vtablet_block_convertor.cpp index 26de6ea6c7e3d1..466902a4f907ab 100644 --- a/be/src/vec/sink/vtablet_block_convertor.cpp +++ b/be/src/vec/sink/vtablet_block_convertor.cpp @@ -506,7 +506,8 @@ Status OlapTableBlockConvertor::_fill_auto_inc_cols(vectorized::Block* block, si vectorized::ColumnInt64::Container& dst_values = dst_column->get_data(); vectorized::ColumnPtr src_column_ptr = block->get_by_position(idx).column; - if (const auto* const_column = check_and_get_column(src_column_ptr)) { + if (const auto* const_column = + check_and_get_column(src_column_ptr.get())) { // for insert stmt like "insert into tbl1 select null,col1,col2,... from tbl2" or // "insert into tbl1 select 1,col1,col2,... from tbl2", the type of literal's column // will be `ColumnConst` @@ -530,7 +531,7 @@ Status OlapTableBlockConvertor::_fill_auto_inc_cols(vectorized::Block* block, si dst_values.resize_fill(rows, value); } } else if (const auto* src_nullable_column = - check_and_get_column(src_column_ptr)) { + check_and_get_column(src_column_ptr.get())) { auto src_nested_column_ptr = src_nullable_column->get_nested_column_ptr(); const auto& null_map_data = src_nullable_column->get_null_map_data(); dst_values.reserve(rows); diff --git a/be/src/vec/sink/writer/iceberg/partition_transformers.h b/be/src/vec/sink/writer/iceberg/partition_transformers.h index 79eb385b298a8f..0b18ce249522eb 100644 --- a/be/src/vec/sink/writer/iceberg/partition_transformers.h +++ b/be/src/vec/sink/writer/iceberg/partition_transformers.h @@ -153,8 +153,8 @@ class StringTruncatePartitionColumnTransform : public PartitionColumnTransform { ColumnPtr string_column_ptr; ColumnPtr null_map_column_ptr; bool is_nullable = false; - if (auto* nullable_column = - check_and_get_column(column_with_type_and_name.column)) { + if (const auto* 
nullable_column = + check_and_get_column(column_with_type_and_name.column.get())) { null_map_column_ptr = nullable_column->get_null_map_column_ptr(); string_column_ptr = nullable_column->get_nested_column_ptr(); is_nullable = true; @@ -211,7 +211,7 @@ class IntegerTruncatePartitionColumnTransform : public PartitionColumnTransform //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -270,7 +270,7 @@ class BigintTruncatePartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -332,8 +332,8 @@ class DecimalTruncatePartitionColumnTransform : public PartitionColumnTransform ColumnPtr column_ptr; ColumnPtr null_map_column_ptr; bool is_nullable = false; - if (auto* nullable_column = - check_and_get_column(column_with_type_and_name.column)) { + if (const auto* nullable_column = + check_and_get_column(column_with_type_and_name.column.get())) { null_map_column_ptr = nullable_column->get_null_map_column_ptr(); column_ptr = nullable_column->get_nested_column_ptr(); is_nullable = true; @@ -342,7 +342,7 @@ class DecimalTruncatePartitionColumnTransform : public PartitionColumnTransform is_nullable = false; } - const auto* const decimal_col = check_and_get_column>(column_ptr); + const auto* const decimal_col = check_and_get_column>(column_ptr.get()); const auto& vec_src = decimal_col->get_data(); auto col_res = ColumnDecimal::create(vec_src.size(), 
decimal_col->get_scale()); @@ -391,7 +391,7 @@ class IntBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -454,7 +454,7 @@ class BigintBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -518,7 +518,7 @@ class DecimalBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -597,7 +597,7 @@ class DateBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -665,7 +665,7 @@ class TimestampBucketPartitionColumnTransform : public PartitionColumnTransform //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr 
= column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -746,7 +746,7 @@ class StringBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -811,7 +811,7 @@ class DateYearPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -883,7 +883,7 @@ class TimestampYearPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -955,7 +955,7 @@ class DateMonthPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1027,7 +1027,7 @@ class TimestampMonthPartitionColumnTransform : public 
PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1099,7 +1099,7 @@ class DateDayPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1177,7 +1177,7 @@ class TimestampDayPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1254,7 +1254,7 @@ class TimestampHourPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1328,7 +1328,7 @@ class VoidPartitionColumnTransform : public PartitionColumnTransform { ColumnPtr column_ptr; ColumnPtr null_map_column_ptr; if (auto* nullable_column = - check_and_get_column(column_with_type_and_name.column)) { + check_and_get_column(column_with_type_and_name.column.get())) { null_map_column_ptr = 
nullable_column->get_null_map_column_ptr(); column_ptr = nullable_column->get_nested_column_ptr(); } else { diff --git a/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp b/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp index 29c97b59ea4dba..608afced8d92db 100644 --- a/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp +++ b/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp @@ -410,7 +410,7 @@ std::any VIcebergTableWriter::_get_iceberg_partition_value( int position) { //1) get the partition column ptr ColumnPtr col_ptr = partition_column.column->convert_to_full_column_if_const(); - CHECK(col_ptr != nullptr); + CHECK(col_ptr); if (col_ptr->is_nullable()) { const ColumnNullable* nullable_column = reinterpret_cast(col_ptr.get()); diff --git a/be/src/vec/utils/util.hpp b/be/src/vec/utils/util.hpp index 8d17b2787a53da..485d81311ba538 100644 --- a/be/src/vec/utils/util.hpp +++ b/be/src/vec/utils/util.hpp @@ -197,7 +197,7 @@ inline void change_null_to_true(ColumnPtr column, ColumnPtr argument = nullptr) data[i] |= null_map[i]; } memset(null_map, 0, rows); - } else if (argument != nullptr && argument->has_null()) { + } else if (argument && argument->has_null()) { const auto* __restrict null_map = assert_cast(argument.get())->get_null_map_data().data(); auto* __restrict data = diff --git a/be/test/olap/date_bloom_filter_test.cpp b/be/test/olap/date_bloom_filter_test.cpp index 715301419e228f..51de4ebd8e7452 100644 --- a/be/test/olap/date_bloom_filter_test.cpp +++ b/be/test/olap/date_bloom_filter_test.cpp @@ -155,8 +155,8 @@ TEST_F(DateBloomFilterTest, query_index_test) { { const auto& reader = segment->_column_readers[0]; std::unique_ptr bf_iter; - EXPECT_TRUE(reader->_bloom_filter_index->load(true, true).ok()); - EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->load(true, true, nullptr).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter, nullptr).ok()); 
std::unique_ptr bf; EXPECT_TRUE(bf_iter->read_bloom_filter(0, &bf).ok()); auto test = [&](const std::string& query_string, bool result) { @@ -174,8 +174,8 @@ TEST_F(DateBloomFilterTest, query_index_test) { { const auto& reader = segment->_column_readers[1]; std::unique_ptr bf_iter; - EXPECT_TRUE(reader->_bloom_filter_index->load(true, true).ok()); - EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->load(true, true, nullptr).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter, nullptr).ok()); std::unique_ptr bf; EXPECT_TRUE(bf_iter->read_bloom_filter(0, &bf).ok()); auto test = [&](const std::string& query_string, bool result) { diff --git a/be/test/olap/itoken_extractor_test.cpp b/be/test/olap/itoken_extractor_test.cpp index ea35f81973c73c..3904dbee5e766f 100644 --- a/be/test/olap/itoken_extractor_test.cpp +++ b/be/test/olap/itoken_extractor_test.cpp @@ -92,4 +92,497 @@ TEST_F(TestITokenExtractor, ngram_like_extractor) { runNextInStringLike(ngram_extractor, {from_u8string(u8"\\_手机%")}, {from_u8string(u8"_手"), from_u8string(u8"手机")}); } + +TEST_F(TestITokenExtractor, ngram_extractor_empty_input) { + // Test empty string input, expect no output + std::string statement = ""; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_single_char) { + // Only one character, less than n=2, should produce no tokens + std::string statement = "a"; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_ascii_characters) { + // Test token extraction for pure ASCII characters + std::string statement = "abcd"; + // 2-gram tokens: "ab", "bc", "cd" + std::vector expect = {"ab", "bc", "cd"}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, 
statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_emoji) { + // Test scenarios that include Emoji and other multi-byte UTF-8 characters + // Assume n=2. Here "👍" is an emoji (4 bytes), "测" is a Chinese character (3 bytes). + // String: "👍测A" (3 elements: 1 Emoji, 1 Chinese char, 1 ASCII) + // For two code points per token: + // First token: "👍测" + // Second token: "测A" + std::string statement = from_u8string(u8"👍测A"); + std::vector expect = {from_u8string(u8"👍测"), from_u8string(u8"测A")}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_n_greater_than_length) { + // When n=3 and the string length is only 2, no 3-character Ngram can be formed + std::string statement = "ab"; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(3); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_chinese_only) { + // Test pure Chinese characters with multi-byte UTF-8 tokens + // String: "中国人" (3 Chinese chars, each 3 bytes) + // n=2, expected tokens: ["中国", "国人"] + std::string statement = from_u8string(u8"中国人"); + std::vector expect = {from_u8string(u8"中国"), from_u8string(u8"国人")}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_mixed_width_characters) { + // Mixed character widths: English (1 byte), Chinese (3 bytes), Emoji (4 bytes) + // String: "A中👍B" + // Code points: 'A'(1), '中'(1), '👍'(1), 'B'(1) total 4 code points + // n=2 tokens: "A中", "中👍", "👍B" + std::string statement = from_u8string(u8"A中👍B"); + std::vector expect = {from_u8string(u8"A中"), from_u8string(u8"中👍"), + from_u8string(u8"👍B")}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_empty_input) { + // Test empty input for like extraction + 
std::string statement = ""; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_no_pattern) { + // No % or _, equivalent to extracting n-length sequences. + // String: "abc", n=2, theoretically extract "ab", "bc" + // next_in_string_like requires n code points to return a token. + // Without % or _, it should still extract normally. + std::string statement = "abc"; + // n=2: extract "ab", then "bc" + std::vector expect = {"ab", "bc"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_pattern1) { + // No % or _, equivalent to extracting n-length sequences. + // String: "abc", n=2, theoretically extract "ab", "bc" + // next_in_string_like requires n code points to return a token. + // Without % or _, it should still extract normally. + std::string statement = "%abc%def%gh%"; + // n=2: extract "ab", then "bc" + std::vector expect = {"ab", "bc", "de", "ef", "gh"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_patterns_only) { + // String has only '%' and '_', no normal chars to form a 2-gram + // "%__%", n=2: % and _ are not considered normal token characters + // Each encounter of % resets the token, so no tokens are generated + std::string statement = "%__%"; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_escaped_characters) { + // Test scenarios with escape characters: "\\%abc% \\_xyz_" + // Escaped '%' should be treated as a normal character, similarly for '_' + // Suppose n=2, for "\\%abc%": + // Initially encounter '\\%' => escaped '%', include it in token: "%a" + // Then 'a'(1 byte) 'b'(1 
byte) form "ab", 'c'(1 byte) continues... + // A bit complex example, mainly to demonstrate properly handling escaped chars. + std::string statement = from_u8string(u8"\\%手机% \\_人_"); + // Analysis: + // "\\%" -> escaped '%', token gets "%" + // then "手"(1 code point), "机"(1 code point). Once 2 code points are formed, we have "%手" + // Move pos. Next token starts from "机": + // '机'(1 code point) + // Next is '%', encountering '%', reset token, skip over ' '... + // Next segment: "\\_人_" + // "\\_" => escaped '_', token gets "_" + // '人'(1 code point) + '_' pattern encountered resets token after outputting "_人" + // Final result: {"%手", "_人"} + // Note: Based on logic, pattern chars % and _ reset the token. After a token is output, + // encountering % or _ resets the token to empty, not affecting previously output tokens. + std::vector expect = {"%手", "手机", " _", "_人"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_complex_pattern) { + // Complex scenario: "abc%中_\\%国%d" + // n=2 analysis: + // Start from the beginning: 'a'(1 code point), 'b'(1 code point) => "ab" output + // Encounter 'c' then '%', at '%' reset token and move forward + // Next: "中"(1 code point), '_' is pattern reset + // Then "\\%" => '%'(1 code point), '国'(1 code point) => "%国" output + // Encounter '%', reset token + // Finally 'd' alone is not enough to form 2 code points, no output + std::string statement = from_u8string(u8"abc%中_\\%国%d"); + std::vector expect = {"ab", "bc", "%国"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_different_n) { + // Test different n values + // String: "abcd" + // n=3: extract "abc", "bcd" + std::string statement = "abcd"; + std::vector expect = {"abc", "bcd"}; + NgramTokenExtractor ngram_extractor(3); + runNextInString(ngram_extractor, statement, 
expect); +} + +std::string get_repetition_info(const std::string& text, size_t n) { + NgramTokenExtractor ngram_extractor(n); + std::vector tokens; + + { + size_t pos = 0; + size_t token_start = 0; + size_t token_length = 0; + while (ngram_extractor.next_in_string(text.c_str(), text.size(), &pos, &token_start, + &token_length)) { + tokens.push_back(text.substr(token_start, token_length)); + } + } + + std::unordered_map token_count; + for (auto& t : tokens) { + token_count[t]++; + } + + int total_tokens = static_cast(tokens.size()); + int repeated_tokens = 0; + for (auto& kv : token_count) { + if (kv.second > 1) { + repeated_tokens += kv.second; + } + } + + double repetition_rate = 0.0; + if (total_tokens > 0) { + repetition_rate = static_cast(repeated_tokens) / total_tokens; + } + + std::ostringstream oss; + oss << "Total tokens: " << total_tokens << "\n" + << "Repeated tokens: " << repeated_tokens << "\n" + << "Repetition rate: " << repetition_rate << "\n"; + + return oss.str(); +} + +TEST_F(TestITokenExtractor, ngram_extractor_repetition_rate_matchine_text) { + std::string statement = + "Exception=System.CannotUnloadAppDomain;\n" + "HResult=0x00007486;\n" + "Message=exception happened;\n" + "Source=BenchmarkLogGenerator;\n" + "StackTrace:\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at 
BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at 
BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at 
BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at 
System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85"; + size_t n = 5; + std::string info = get_repetition_info(statement, n); + + std::cout << info << std::endl; +} + +TEST_F(TestITokenExtractor, ngram_extractor_repetition_rate_short_text) { + std::string statement = + "I bought these leggings for my daughter @ Christmas along with several other " + "leggings. She liked these leggings the best since they were lined and are very warm. " + " She is 5'3" and 115 lbs. and they fit her very well/comfortable. The only thing " + "I disliked about them is that the pattern is not uniform on both legs as it gets to " + "your upper thigh area."; + size_t n = 5; + std::string info = get_repetition_info(statement, n); + + std::cout << info << std::endl; +} + +TEST_F(TestITokenExtractor, ngram_extractor_repetition_rate_medium_text) { + std::string statement = + "Loving the fabulous and exquisite women's wear for plus size women, because of how " + "this sweater makes you feel good about yourself, and speaks to her heart with a " + "positive perspective, given her overall character as well." + "I bought these leggings for my daughter @ Christmas along with several other " + "leggings. She liked these leggings the best since they were lined and are very warm. " + " She is 5'3" and 115 lbs. and they fit her very well/comfortable. The only thing " + "I disliked about them is that the pattern is not uniform on both legs as it gets to " + "your upper thigh area." + "Love my boot cuffs I got as a gift. This is one I won’t be re-gifting. People at work " + "love it, good quality and good value. Love that it’s reversible and I can wear it " + "with any size boots." + "Reminds me of being 13 in the early 80's, only these are more attractive. 
These leg " + "warmers are exactly as pictured, soft & warm over my jeans to keep out the chill on " + "this snowy day. Brand new in package & I am very happy with this purchase. I will " + "buy another pair to double up the warmth on my bare legs." + "I couldn't be happier with this dress. It is the epitome of classic WW2 era ladies " + "fashion.
The material is lightweight, yet very soft and silky. It has a full " + "lining to it. I would
recommend sizing up on this particular
style as it " + "has a way of hugging your
curves, and in the midsection .

If you have " + "a perfectly flat stomach, then No worries.
But ladies who have a wee bit of a " + "pouch inFront, this dress may hug you a tad in the tummy.
It hangs very nicely " + "in back, and flows
beautifully. Honestly , i would order one in
every " + "color of the rainbow if they sold
them !
I love it, Thank You!
This is " + "my 4th dress from this vendor, and
by far my favorite." + "This tie is super cute! I love the color and the design... but that's about it.

The day after receiving it in the mail I strapped it on and wore it to work. " + "Within the first few hours I noticed the little white Vs began to fray and frizz. By " + "the end if the day most of white threading had completely frayed out. This tie was " + "very, very cheaply made.

It's a shame, because it is... or was... a very " + "good-looking bow tie!" + "The color and pictures looks very good. It fits really nicely with a bit of stretch " + "in the material. I was afraid after washing it that the colors would fade but it did " + "not. I highly recommand it t!!!" + "I just purchased this coat, and I have to say that so far, I am very satisfied with " + "it. The belt is a nice added touch, but not necessary to wear. This coat keeps me " + "very warm, and with the winter we're having this year, it's been a life saver. I " + "have gotten compliments on how it looks as well. This is replacing another coat that " + "had a zipper that broke after two winters of wearing it, so I am being extra careful " + "when zippering up this one. It's too soon to say how sturdy the zipper is on this " + "one, but as far as everything else, it's serving its purpose well. I highly " + "recommend it for the quality and price." + "ABSOLUTELY JUNK! wore it about four times then the hood nearly ripped completely off! " + "The Seam came out completely! DO NOT BUY WOULD LOVE TO HAVE MY MONEY COMPLETELY " + "REFUNDED!" + "this was the worst thing I brought online
it was very cheaply made size not " + "true brought
as a gift was so embarrassing the person did not accept the gift
the fur inside looked real fake I am stuck with this one" + "Honestly the most comfortable jacket I've ever worn. Will probably buy this jacket " + "for the rest of my life. End of story" + "ok Im trying to figure out if this is women or unisex sizing..This has a man wearing " + "it but it clearly is for a girl. I need to know before I order." + "Very comfortable and cute! It works well in school uniform and everyday wear. The " + "light material and zippers on the shoulders are super unique and welcomed addition to " + "my otherwise drab uniform!" + "The color is active. THe style is ok.
One thing to remember is to order one size " + "bigger than your regular size. For example, I wear S and the size M is OK ON me" + "These are actually considered panty hose. Unless you are using under a dress or a " + "very long sweater dont buy. Leggins is not the right description!!!''" + "Nice Dress" + "I am overall happy with the leggings. But be aware that if you are larger then a size " + "8, these will be too small for you. I am a size 8 and they just fit. The pattern is " + "stretched out quite a bit, but I think it still looks pretty good even tho the " + "pattern stretch out is not quite as bright and crisp. No complaints about the length " + "for me. I am 5'7" and these leggings reach my ankles without the feeling that " + "they are going to pull off of my hips." + "I bought these jeans knowing they were marked 'irregular' and thought there would be " + "a noticeable flaw. But when I received these jeans I was pleasantly surprised. They " + "look great and I couldn't find a flaw. The only thing I noticed was that the jeans " + "fit a bit tight around my butt. This is my first pair of big star jeans so it could " + "just be how they fit but I'm not sure. Other than that, these jeans are great for the " + "price." + "great scarf for price, ships quickly, color is more turquoise, than baby blue. really " + "like the chevron design lots of compliments." + "The fit of these leggings is excellent, they are extremely comfortable and true to " + "size. Not a skinny girl's legging, there's room to breathe. The classy, paisley " + "pattern makes regular black leggings seem boring. Good material and the design is " + "done nicely. An excellent buy, thanks Amazon." + "The dress is gorgeous and the mesh hearts are awesome. the material was a little " + "surprising, but its really cool" + "It did take long to get though well worth the wait... This was a gift for my daughter " + "and she loved it!! No issues with the product !" + "I love this sweater. 
I bought it for my daughter and she loves it. The colors are " + "very bright and I will surely be purchasing more from this seller." + "I bought this sweater in this color and in black in medium. I wear a medium. I " + "tried on the black first and the entire sweater fell apart as I was putting it on! " + "It literally came apart at the seams!" + "This wallet is nice looking and has the strongest chain I have ever seen. However, " + "it
simply has too few wallets for credit cards, so I sent it back. Others, " + "however may like
it, so check it out anyway." + "My husband loves his new scarf, as it is so extremely soft and warm. He was even " + "willing to give up his favorite scarf, which he has worn for years, for this one. It " + "adds just the right amount of color at the neckline of his black wool overcoat to " + "wear to the office." + "This dress appears to be quite beautiful in picture but is not. The materials was not " + "very nice, looked a bit cheap. as well the overall fit was not very nice. Had the " + "materials been of slightly better quality, it would have made up for some minor " + "imperfections. The dress runs very very small. I am an xs/s typically and thought " + "this was just too too tight and uncomfortable." + "Very nice scarves. Only complaint would be the description says one is purple but it " + "is actually a burgandy color." + "I ordered a large which is my usual size and found the arms to really tight even " + "without a winter sweater.
Poor quality - strings and "pulls" everywhere" + "Thank you so much for my my beautiful dress. The fit was perfect. The detail of the " + "dress was exactly like the picture. Also the dress was delivered before time. Thanks " + "again and I will be making future purchases very soon.5 stars for sure." + "this is a great looking shirt but i wish they had it in a medium i would definatley " + "spend my money if it was smaller" + "Purchased this for my granddaughter, and she simply loves it! People tell her, she " + "looks like a "Pop Star" because of the design and even mention she looks like " + "Michael Jackson! All she needs is to learn how to sing and dance!" + "At first I was worried that they would not stay up, but that was not a problem. I " + "wish they were available in a calf length for boots" + "I purchased this hat, more for a joke then keeping warm. The hat and beard are well " + "made. Looks cool. I don't think the beard would really do much to keep your face " + "warm. My buddies all got a laugh when I showed up wearing it." + "The actual shorts and ordering process was great but listed measurements diddnt match " + "up. I ordered the nxt size up and still too small." + "If you are looking for stretchy these aren't it so make sure to order right size. " + "Because of the fleece material inside they slide down constantly. Not too happy. But " + "they are pretty." + "So I have a 45+ inch chest and a 31 inch waist. Some would say that I'm athletically " + "proportioned. I will never find anything that fits me the way that it's supposed to " + "fit but this hoodie came damn near close. The US XL is nearly perfect for me. It " + "tappers around the waist as advertise even for broader guy like myself. My only quirk " + "is the collar around the hood gives a "no neck" appearance. But it's growing " + "on me. So as I said "nearly perfect"." + "This hat was purchased for my nephew for Christmas. It barely made it through " + "Christmas Eve. 
The fabric is extremely flimsy and there was a giant hole in it after " + "one or two times he put it on. I was able to get Amazon to refund my money, but not " + "worth the purchase. Very flimsy material." + "Got these for my mom and she wears them all the time. cute and comfy. I will borrow " + "them from her soon." + "first, the color is not like the picture above, the material of the shirt looks so " + "cheap and uncomfortable, the lace also looks so cheap.
second, at least use a " + "better material, the product really don't looks like the picture and not worthy at all" + "I purchased for my daughter and she loves it! This is a very high quality product " + "and worth the cost. I certainly would not pay $500 as the suggested price but " + "certainly worth the $160 paid. It did take nearly one month to arrive." + "The elastic material is comfortable, fits great on me . The straps are detachable so " + "you can have it cross your back or go bare." + "This blazer was poorly sewn together. The metal closure fell off when trying it on " + "for the first time. The material was uneven in length. This was a disappointing " + "purchase." + "I'm wearing this with my steelers t-shirt when I go to Vegas in a couple of weeks to " + "represent my team even though we not in the super bowl" + "I ordered a 3X. Normally a 2X will fit me in most clothing, but I order 3X when " + "available. This was tight and very,very thin. I returned it." + "This hood is super adorable and I love the pink/gray combination. There are just 2 " + "small things that I wasn't thrilled about. 1) The hood itself is just a tad small. 2) " + "The back part is cut kinda short leaving my neck a tinse exposed but I just pushed " + "the hood further back on my head and got a bit more coverage out of it. But I can " + "live with those things because it is super cute!" + "Love the color, cut and style of these gloves. They keep my hands warm without " + "restricting the use of my fingers for keying, sorting , etc. I think they are the " + "smartest buy I've made all winter!" + "so sucks the quality
the color is not like the picture above and the fur makes " + "it looks so cheap" + "And they look great on me! LOL They are simple with a classic look to them. I'll " + "probably pair with similar color shoes." + "The size was at least two sizes smaller than the printed size. They do not shape " + "well. I was very disappointed."; + size_t n = 5; + std::string info = get_repetition_info(statement, n); + + std::cout << info << std::endl; +} } // namespace doris diff --git a/be/test/olap/primary_key_index_test.cpp b/be/test/olap/primary_key_index_test.cpp index 72aae56cd0938f..9407be938867ec 100644 --- a/be/test/olap/primary_key_index_test.cpp +++ b/be/test/olap/primary_key_index_test.cpp @@ -80,12 +80,12 @@ TEST_F(PrimaryKeyIndexTest, builder) { PrimaryKeyIndexReader index_reader; io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); - EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta).ok()); - EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta).ok()); + EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta, nullptr).ok()); + EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta, nullptr).ok()); EXPECT_EQ(num_rows, index_reader.num_rows()); std::unique_ptr index_iterator; - EXPECT_TRUE(index_reader.new_iterator(&index_iterator).ok()); + EXPECT_TRUE(index_reader.new_iterator(&index_iterator, nullptr).ok()); bool exact_match = false; uint32_t row_id; for (size_t i = 0; i < keys.size(); i++) { @@ -142,7 +142,7 @@ TEST_F(PrimaryKeyIndexTest, builder) { int batch_size = 1024; while (remaining > 0) { std::unique_ptr iter; - EXPECT_TRUE(index_reader.new_iterator(&iter).ok()); + EXPECT_TRUE(index_reader.new_iterator(&iter, nullptr).ok()); size_t num_to_read = std::min(batch_size, remaining); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( @@ -199,12 +199,12 @@ TEST_F(PrimaryKeyIndexTest, multiple_pages) { PrimaryKeyIndexReader index_reader; io::FileReaderSPtr file_reader; 
EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); - EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta).ok()); - EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta).ok()); + EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta, nullptr).ok()); + EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta, nullptr).ok()); EXPECT_EQ(num_rows, index_reader.num_rows()); std::unique_ptr index_iterator; - EXPECT_TRUE(index_reader.new_iterator(&index_iterator).ok()); + EXPECT_TRUE(index_reader.new_iterator(&index_iterator, nullptr).ok()); bool exact_match = false; uint32_t row_id; for (size_t i = 0; i < keys.size(); i++) { @@ -283,12 +283,12 @@ TEST_F(PrimaryKeyIndexTest, single_page) { PrimaryKeyIndexReader index_reader; io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); - EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta).ok()); - EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta).ok()); + EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta, nullptr).ok()); + EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta, nullptr).ok()); EXPECT_EQ(num_rows, index_reader.num_rows()); std::unique_ptr index_iterator; - EXPECT_TRUE(index_reader.new_iterator(&index_iterator).ok()); + EXPECT_TRUE(index_reader.new_iterator(&index_iterator, nullptr).ok()); bool exact_match = false; uint32_t row_id; for (size_t i = 0; i < keys.size(); i++) { diff --git a/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp b/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp index 813952595efcfd..e561f8ce944887 100644 --- a/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp +++ b/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp @@ -124,10 +124,10 @@ void get_bloom_filter_reader_iter(const std::string& file_name, const ColumnInde io::FileReaderSPtr file_reader; ASSERT_EQ(io::global_local_filesystem()->open_file(fname, 
&file_reader), Status::OK()); *reader = new BloomFilterIndexReader(std::move(file_reader), meta.bloom_filter_index()); - auto st = (*reader)->load(true, false); + auto st = (*reader)->load(true, false, nullptr); EXPECT_TRUE(st.ok()); - st = (*reader)->new_iterator(iter); + st = (*reader)->new_iterator(iter, nullptr); EXPECT_TRUE(st.ok()); } diff --git a/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp b/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp index 33848c5959cfaa..ffd9c92ee0272e 100644 --- a/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp +++ b/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp @@ -74,7 +74,7 @@ TEST_F(OrdinalPageIndexTest, normal) { io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); OrdinalIndexReader index(file_reader, 16 * 1024 * 4096 + 1, index_meta.ordinal_index()); - EXPECT_TRUE(index.load(true, false).ok()); + EXPECT_TRUE(index.load(true, false, nullptr).ok()); EXPECT_EQ(16 * 1024, index.num_data_pages()); EXPECT_EQ(1, index.get_first_ordinal(0)); EXPECT_EQ(4096, index.get_last_ordinal(0)); @@ -128,7 +128,7 @@ TEST_F(OrdinalPageIndexTest, one_data_page) { } OrdinalIndexReader index(nullptr, num_values, index_meta.ordinal_index()); - EXPECT_TRUE(index.load(true, false).ok()); + EXPECT_TRUE(index.load(true, false, nullptr).ok()); EXPECT_EQ(1, index.num_data_pages()); EXPECT_EQ(0, index.get_first_ordinal(0)); EXPECT_EQ(num_values - 1, index.get_last_ordinal(0)); diff --git a/be/test/olap/segment_cache_test.cpp b/be/test/olap/segment_cache_test.cpp index b226bc6c2292e5..c527ffddd424b9 100644 --- a/be/test/olap/segment_cache_test.cpp +++ b/be/test/olap/segment_cache_test.cpp @@ -323,7 +323,7 @@ TEST_F(SegmentCacheTest, vec_sequence_col) { segment_v2::SegmentSharedPtr segment_ptr = handle.get_segments()[0]; // load index and bf second - res = segment_ptr->load_pk_index_and_bf(); + res = segment_ptr->load_pk_index_and_bf(nullptr); ASSERT_TRUE(res.ok()); // check 
cache mem usage equals to segment mem usage diff --git a/be/test/util/core_local_test.cpp b/be/test/util/core_local_test.cpp deleted file mode 100644 index ed87015b189e1c..00000000000000 --- a/be/test/util/core_local_test.cpp +++ /dev/null @@ -1,122 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -#include "util/core_local.h" - -#include -#include -#include -#include - -#include -#include - -#include "common/logging.h" -#include "gtest/gtest_pred_impl.h" -#include "testutil/test_util.h" -#include "util/stopwatch.hpp" - -namespace doris { - -// Fixture for testing class Decompressor -class CoreLocalTest : public ::testing::Test { -protected: - CoreLocalTest() {} - ~CoreLocalTest() {} -}; - -void updater(int64_t loop, CoreLocalValue* value, int64_t* used_ns) { - usleep(100); - MonotonicStopWatch stopwatch; - stopwatch.start(); - for (int i = 0; i < loop; ++i) { - __sync_fetch_and_add(value->access(), 1); - } - *used_ns = stopwatch.elapsed_time(); -} - -TEST_F(CoreLocalTest, CoreLocalValue) { - int64_t loop = LOOP_LESS_OR_MORE(1000, 1000000L); - CoreLocalValue value; - std::vector used_ns; - used_ns.resize(8); - std::vector workers; - for (int i = 0; i < 8; ++i) { - workers.emplace_back(updater, loop, &value, &used_ns[i]); - } - int64_t sum_ns = 0; - for (int i = 0; i < 8; ++i) { - workers[i].join(); - sum_ns += used_ns[i]; - } - int64_t sum = 0; - for (int i = 0; i < value.size(); ++i) { - sum += __sync_fetch_and_add(value.access_at_core(i), 0); - } - EXPECT_EQ(8 * loop, sum); - LOG(INFO) << "time:" << sum_ns / sum << "ns/op"; -} - -TEST_F(CoreLocalTest, CoreDataAllocator) { - CoreDataAllocatorFactory factory; - auto allocator1 = factory.get_allocator(1, 8); - auto ptr = allocator1->get_or_create(0); - EXPECT_TRUE(ptr != nullptr); - { - auto ptr2 = allocator1->get_or_create(0); - EXPECT_TRUE(ptr == ptr2); - } - { - auto ptr2 = allocator1->get_or_create(4096); - EXPECT_TRUE(ptr2 != nullptr); - } - { - auto allocator2 = factory.get_allocator(2, 8); - EXPECT_TRUE(allocator2 != allocator1); - } -} - -TEST_F(CoreLocalTest, CoreLocalValueController) { - CoreLocalValueController controller; - auto id = controller.get_id(); - EXPECT_EQ(0, id); - controller.reclaim_id(id); - id = controller.get_id(); - EXPECT_EQ(0, id); - id = controller.get_id(); - EXPECT_EQ(1, 
id); -} - -TEST_F(CoreLocalTest, CoreLocalValueNormal) { - CoreLocalValue value; - for (int i = 0; i < value.size(); ++i) { - EXPECT_EQ(0, *value.access_at_core(i)); - *value.access_at_core(i) += 1; - } - for (int i = 0; i < value.size(); ++i) { - EXPECT_EQ(1, *value.access_at_core(i)); - } - for (int i = 0; i < 10000; ++i) { - *value.access() += 1; - } - int64_t sum = 0; - for (int i = 0; i < value.size(); ++i) { - sum += *value.access_at_core(i); - } - EXPECT_EQ(10000 + value.size(), sum); -} -} // namespace doris diff --git a/be/test/util/doris_metrics_test.cpp b/be/test/util/doris_metrics_test.cpp index dcba57cb7e9ff2..6e9969b1210345 100644 --- a/be/test/util/doris_metrics_test.cpp +++ b/be/test/util/doris_metrics_test.cpp @@ -34,14 +34,14 @@ TEST_F(DorisMetricsTest, Normal) { auto server_entity = DorisMetrics::instance()->server_entity(); // check metric { - DorisMetrics::instance()->fragment_requests_total->reset(); + DorisMetrics::instance()->fragment_requests_total->set_value(0); DorisMetrics::instance()->fragment_requests_total->increment(12); auto metric = server_entity->get_metric("fragment_requests_total"); EXPECT_TRUE(metric != nullptr); EXPECT_STREQ("12", metric->to_string().c_str()); } { - DorisMetrics::instance()->fragment_request_duration_us->reset(); + DorisMetrics::instance()->fragment_request_duration_us->set_value(0); DorisMetrics::instance()->fragment_request_duration_us->increment(101); auto metric = server_entity->get_metric("fragment_request_duration_us"); EXPECT_TRUE(metric != nullptr); @@ -92,7 +92,7 @@ TEST_F(DorisMetricsTest, Normal) { } // engine request { - DorisMetrics::instance()->create_tablet_requests_total->reset(); + DorisMetrics::instance()->create_tablet_requests_total->set_value(0); DorisMetrics::instance()->create_tablet_requests_total->increment(15); auto metric = server_entity->get_metric("create_tablet_requests_total", "engine_requests_total"); @@ -100,7 +100,7 @@ TEST_F(DorisMetricsTest, Normal) { EXPECT_STREQ("15", 
metric->to_string().c_str()); } { - DorisMetrics::instance()->drop_tablet_requests_total->reset(); + DorisMetrics::instance()->drop_tablet_requests_total->set_value(0); DorisMetrics::instance()->drop_tablet_requests_total->increment(16); auto metric = server_entity->get_metric("drop_tablet_requests_total", "engine_requests_total"); @@ -129,7 +129,7 @@ TEST_F(DorisMetricsTest, Normal) { EXPECT_STREQ("20", metric->to_string().c_str()); } { - DorisMetrics::instance()->storage_migrate_requests_total->reset(); + DorisMetrics::instance()->storage_migrate_requests_total->set_value(0); DorisMetrics::instance()->storage_migrate_requests_total->increment(21); auto metric = server_entity->get_metric("storage_migrate_requests_total", "engine_requests_total"); diff --git a/be/test/util/metrics_test.cpp b/be/test/util/metrics_test.cpp index 305d17c47ca06f..1703b5b42bd7b4 100644 --- a/be/test/util/metrics_test.cpp +++ b/be/test/util/metrics_test.cpp @@ -46,7 +46,7 @@ TEST_F(MetricsTest, Counter) { EXPECT_STREQ("100", counter.to_string().c_str()); } { - IntAtomicCounter counter; + IntCounter counter; EXPECT_EQ(0, counter.value()); counter.increment(100); EXPECT_EQ(100, counter.value()); @@ -99,7 +99,7 @@ TEST_F(MetricsTest, CounterPerf) { } // IntAtomicCounter { - IntAtomicCounter counter; + IntCounter counter; MonotonicStopWatch watch; watch.start(); for (int i = 0; i < kLoopCount; ++i) { @@ -141,11 +141,11 @@ TEST_F(MetricsTest, CounterPerf) { } // multi-thread for IntAtomicCounter { - IntAtomicCounter mt_counter; + IntCounter mt_counter; std::vector updaters; std::atomic used_time(0); for (int i = 0; i < 8; ++i) { - updaters.emplace_back(&mt_updater, kThreadLoopCount, &mt_counter, + updaters.emplace_back(&mt_updater, kThreadLoopCount, &mt_counter, &used_time); } for (int i = 0; i < 8; ++i) { diff --git a/be/test/vec/columns/common_column_test.h b/be/test/vec/columns/common_column_test.h index 8e1b86c0168f99..b70ac660136216 100644 --- a/be/test/vec/columns/common_column_test.h 
+++ b/be/test/vec/columns/common_column_test.h @@ -989,7 +989,7 @@ class CommonColumnTest : public ::testing::Test { // check size EXPECT_EQ(ptr->size(), *cl); // check ptr is not the same - EXPECT_NE(ptr.get(), source_column); + EXPECT_NE(ptr.get(), source_column.get()); // check after clone_resized with assert_res auto ser_col = ColumnString::create(); @@ -1042,7 +1042,7 @@ class CommonColumnTest : public ::testing::Test { // check size EXPECT_EQ(ptr->size(), insert_size); // check ptr is not the same - EXPECT_NE(ptr.get(), source_column); + EXPECT_NE(ptr.get(), source_column.get()); // check after cut with assert_res auto ser_col = ColumnString::create(); ser_col->reserve(ptr->size()); @@ -1095,7 +1095,7 @@ class CommonColumnTest : public ::testing::Test { // check size EXPECT_EQ(ptr->size(), insert_size); // check ptr is not the same - EXPECT_NE(ptr.get(), source_column); + EXPECT_NE(ptr.get(), source_column.get()); // check after cut with assert_res auto ser_col = ColumnString::create(); ser_col->reserve(ptr->size()); diff --git a/be/test/vec/data_types/from_string_test.cpp b/be/test/vec/data_types/from_string_test.cpp index 01515b805d9be0..eb8b00ab16f69c 100644 --- a/be/test/vec/data_types/from_string_test.cpp +++ b/be/test/vec/data_types/from_string_test.cpp @@ -203,7 +203,7 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { string test_str = std::get<1>(type_pair)[i]; // data_type from_string ReadBuffer rb_test(test_str.data(), test_str.size()); - Status st = data_type_ptr->from_string(rb_test, col); + Status st = data_type_ptr->from_string(rb_test, col.get()); if (std::get<3>(type_pair)[i].empty()) { EXPECT_EQ(st.ok(), false); std::cout << "deserialize failed: " << st.to_json() << std::endl; @@ -256,11 +256,11 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { ReadBuffer rand_rb(rand_date.data(), rand_date.size()); auto col = data_type_ptr->create_column(); - Status st = data_type_ptr->from_string(min_rb, col); + Status st = 
data_type_ptr->from_string(min_rb, col.get()); EXPECT_EQ(st.ok(), true); - st = data_type_ptr->from_string(max_rb, col); + st = data_type_ptr->from_string(max_rb, col.get()); EXPECT_EQ(st.ok(), true); - st = data_type_ptr->from_string(rand_rb, col); + st = data_type_ptr->from_string(rand_rb, col.get()); EXPECT_EQ(st.ok(), true); string min_s_d = data_type_ptr->to_string(*col, 0); @@ -319,7 +319,7 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { string rand_ip = rand_wf->to_string(); ReadBuffer rand_rb(rand_ip.data(), rand_ip.size()); auto col = data_type_ptr->create_column(); - st = data_type_ptr->from_string(rand_rb, col); + st = data_type_ptr->from_string(rand_rb, col.get()); EXPECT_EQ(st.ok(), true); string rand_s_d = data_type_ptr->to_string(*col, 0); rtrim(rand_ip); @@ -336,7 +336,7 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { EXPECT_EQ(st.ok(), false); ReadBuffer rand_rb(pair.second.data(), pair.second.size()); auto col = data_type_ptr->create_column(); - st = data_type_ptr->from_string(rand_rb, col); + st = data_type_ptr->from_string(rand_rb, col.get()); EXPECT_EQ(st.ok(), false); } } diff --git a/be/test/vec/data_types/serde/data_type_serde_text_test.cpp b/be/test/vec/data_types/serde/data_type_serde_text_test.cpp index 2affbc36c86ab3..b65b3fc6f63d2c 100644 --- a/be/test/vec/data_types/serde/data_type_serde_text_test.cpp +++ b/be/test/vec/data_types/serde/data_type_serde_text_test.cpp @@ -510,7 +510,7 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); EXPECT_EQ(status.ok(), true); auto ser_col = ColumnString::create(); ser_col->reserve(1); @@ -661,7 +661,7 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { { ReadBuffer rb(rand_str.data(), rand_str.size()); std::cout << "from string rb: " << rb.to_string() << std::endl; - Status stat = 
map_data_type_ptr->from_string(rb, col2); + Status stat = map_data_type_ptr->from_string(rb, col2.get()); std::cout << stat.to_json() << std::endl; auto ser_col = ColumnString::create(); ser_col->reserve(1); @@ -840,7 +840,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = array_data_type_ptr->create_column(); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; @@ -995,7 +995,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = array_data_type_ptr->create_column(); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; @@ -1213,7 +1213,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = map_data_type_ptr->create_column(); - Status status = map_data_type_ptr->from_string(rb, col2); + Status status = map_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; @@ -1354,7 +1354,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = array_data_type_ptr->create_column(); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << 
status.to_json() << std::endl; diff --git a/be/test/vec/exec/concurrent_queue_order.cpp b/be/test/vec/exec/concurrent_queue_order.cpp new file mode 100644 index 00000000000000..bc3e3c7ee6a6a7 --- /dev/null +++ b/be/test/vec/exec/concurrent_queue_order.cpp @@ -0,0 +1,109 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include + +#include +#include + +namespace doris::vectorized { + +class ConcurrentQueueOrder : public testing::Test { +public: + ConcurrentQueueOrder() = default; + ~ConcurrentQueueOrder() override = default; +}; +// The previously used moodycamel::ConcurrentQueue does not guarantee that the enqueue order matches the dequeue order, +// even when there is only a single producer and a single consumer. +// Refer to this issue: https://github.com/cameron314/concurrentqueue/issues/316 +// We can use tokens to ensure the correct order. 
+TEST_F(ConcurrentQueueOrder, test_not_guarantee_order) { + { + moodycamel::ConcurrentQueue data_queue; + int num = 0; + std::mutex m; + std::atomic_bool flag = true; + + auto task = [&](int thread_id) { + while (flag) { + std::lock_guard lc {m}; + data_queue.enqueue(num++); + } + }; + std::thread input1(task, 0); + std::thread input2(task, 1); + std::thread input3(task, 2); + + std::this_thread::sleep_for(std::chrono::milliseconds(50)); + flag = false; + + input3.join(); + input1.join(); + input2.join(); + + std::cout << "queue size " << data_queue.size_approx() << "\n"; + std::vector outputs; + int output; + while (data_queue.try_dequeue(output)) { + outputs.push_back(output); + } + + EXPECT_FALSE(std::is_sorted(outputs.begin(), outputs.end())); + std::cout << "output is sorted : " << std::is_sorted(outputs.begin(), outputs.end()) + << "\n"; + } +} + +TEST_F(ConcurrentQueueOrder, test_guarantee_order) { + { + moodycamel::ConcurrentQueue data_queue; + moodycamel::ProducerToken ptok {data_queue}; + int num = 0; + std::mutex m; + std::atomic_bool flag = true; + + auto task = [&](int thread_id) { + while (flag) { + std::lock_guard lc {m}; + data_queue.enqueue(ptok, num++); + } + }; + std::thread input1(task, 0); + std::thread input2(task, 1); + std::thread input3(task, 2); + + std::this_thread::sleep_for(std::chrono::milliseconds(50)); + flag = false; + + input3.join(); + input1.join(); + input2.join(); + + std::cout << "queue size " << data_queue.size_approx() << "\n"; + std::vector outputs; + int output; + while (data_queue.try_dequeue(output)) { + outputs.push_back(output); + } + + EXPECT_TRUE(std::is_sorted(outputs.begin(), outputs.end())); + std::cout << "output is sorted : " << std::is_sorted(outputs.begin(), outputs.end()) + << "\n"; + } +} +} // namespace doris::vectorized diff --git a/be/test/vec/function/function_test_util.h b/be/test/vec/function/function_test_util.h index c33a1d64f83111..1c4c0906b80d3e 100644 --- 
a/be/test/vec/function/function_test_util.h +++ b/be/test/vec/function/function_test_util.h @@ -69,7 +69,7 @@ using Row = std::pair; using DataSet = std::vector; using InputTypeSet = std::vector; -// FIXME: should use exception or expected to deal null value.w +// FIXME: should use exception or expected to deal null value. int64_t str_to_date_time(std::string datetime_str, bool data_time = true); uint32_t str_to_date_v2(std::string datetime_str, std::string datetime_format); uint64_t str_to_datetime_v2(std::string datetime_str, std::string datetime_format); @@ -315,7 +315,7 @@ Status check_function(const std::string& func_name, const InputTypeSet& input_ty // 3. check the result of function ColumnPtr column = block.get_columns()[result]; - EXPECT_TRUE(column != nullptr); + EXPECT_TRUE(column); for (int i = 0; i < row_size; ++i) { // update current line diff --git a/be/test/vec/function/function_time_test.cpp b/be/test/vec/function/function_time_test.cpp index a4299de3557608..ddfc722c7ab452 100644 --- a/be/test/vec/function/function_time_test.cpp +++ b/be/test/vec/function/function_time_test.cpp @@ -15,6 +15,7 @@ // specific language governing permissions and limitations // under the License. 
+#include #include #include @@ -299,14 +300,22 @@ TEST(VTimestampFunctionsTest, years_add_test) { InputTypeSet input_types = {TypeIndex::DateTime, TypeIndex::Int32}; - DataSet data_set = { - {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2025-05-23 00:00:00")}, - {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2015-05-23 00:00:00")}, - {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:00"), 8000}, Null()}, - {{Null(), 5}, Null()}}; + { + DataSet data_set = { + {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2025-05-23 00:00:00")}, + {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2015-05-23 00:00:00")}, + {{std::string(""), 5}, Null()}, + {{Null(), 5}, Null()}}; - static_cast(check_function(func_name, input_types, data_set)); + static_cast(check_function(func_name, input_types, data_set)); + } + + { + DataSet data_set = {{{std::string("2020-05-23 00:00:00"), 8000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, years_sub_test) { @@ -314,14 +323,22 @@ TEST(VTimestampFunctionsTest, years_sub_test) { InputTypeSet input_types = {TypeIndex::DateTime, TypeIndex::Int32}; - DataSet data_set = { - {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2015-05-23 00:00:00")}, - {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2025-05-23 00:00:00")}, - {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:00"), 3000}, Null()}, - {{Null(), 5}, Null()}}; + { + DataSet data_set = { + {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2015-05-23 00:00:00")}, + {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2025-05-23 00:00:00")}, + {{std::string(""), 5}, Null()}, + {{Null(), 5}, Null()}}; - static_cast(check_function(func_name, input_types, data_set)); + static_cast(check_function(func_name, input_types, data_set)); + } + + { + DataSet data_set = {{{std::string("2020-05-23 
00:00:00"), 3000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, months_add_test) { @@ -1043,11 +1060,18 @@ TEST(VTimestampFunctionsTest, years_add_v2_test) { {{std::string("2020-05-23"), 5}, str_to_date_v2("2025-05-23", "%Y-%m-%d")}, {{std::string("2020-05-23"), -5}, str_to_date_v2("2015-05-23", "%Y-%m-%d")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23"), 8000}, Null()}, {{Null(), 5}, Null()}}; static_cast(check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = {TypeIndex::DateV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23"), 8000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } { InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; @@ -1057,12 +1081,19 @@ TEST(VTimestampFunctionsTest, years_add_v2_test) { {{std::string("2020-05-23 00:00:11.123"), -5}, str_to_datetime_v2("2015-05-23 00:00:11.123", "%Y-%m-%d %H:%i:%s.%f")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:11.123"), 8000}, Null()}, {{Null(), 5}, Null()}}; static_cast( check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23 00:00:11.123"), 8000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, years_sub_v2_test) { @@ -1075,11 +1106,19 @@ TEST(VTimestampFunctionsTest, years_sub_v2_test) { {{std::string("2020-05-23"), 5}, str_to_date_v2("2015-05-23", "%Y-%m-%d")}, {{std::string("2020-05-23"), -5}, str_to_date_v2("2025-05-23", "%Y-%m-%d")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23"), 3000}, Null()}, {{Null(), 5}, Null()}}; static_cast(check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = 
{TypeIndex::DateV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23"), 3000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } + { InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; @@ -1088,12 +1127,19 @@ TEST(VTimestampFunctionsTest, years_sub_v2_test) { {{std::string("2020-05-23 00:00:11.123"), -5}, str_to_datetime_v2("2025-05-23 00:00:11.123", "%Y-%m-%d %H:%i:%s.%f")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:11.123"), 3000}, Null()}, {{Null(), 5}, Null()}}; static_cast( check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23 00:00:11.123"), 3000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, months_add_v2_test) { diff --git a/be/test/vec/olap/char_type_padding_test.cpp b/be/test/vec/olap/char_type_padding_test.cpp index 0e4879e46a6990..dfdfea3026ecd0 100644 --- a/be/test/vec/olap/char_type_padding_test.cpp +++ b/be/test/vec/olap/char_type_padding_test.cpp @@ -40,10 +40,10 @@ TEST(CharTypePaddingTest, CharTypePaddingFullTest) { for (size_t i = 0; i < rows; i++) { input->insert_data(str.data(), str.length()); } - EXPECT_FALSE(ConvertorChar::should_padding(input, str.length())); + EXPECT_FALSE(ConvertorChar::should_padding(input.get(), str.length())); input->insert_data(str.data(), str.length() - 1); - EXPECT_TRUE(ConvertorChar::should_padding(input, str.length())); + EXPECT_TRUE(ConvertorChar::should_padding(input.get(), str.length())); } TEST(CharTypePaddingTest, CharTypePaddingDataTest) { @@ -56,7 +56,7 @@ TEST(CharTypePaddingTest, CharTypePaddingDataTest) { input->insert_data(str.data(), str.length() - i); } - auto output = ConvertorChar::clone_and_padding(input, str.length()); + auto output = 
ConvertorChar::clone_and_padding(input.get(), str.length()); for (int i = 0; i < rows; i++) { auto cell = output->get_data_at(i).to_string(); diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql new file mode 100644 index 00000000000000..a946b25ff1af04 --- /dev/null +++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql @@ -0,0 +1,16 @@ +CREATE DATABASE IF NOT EXISTS multi_catalog; +USE multi_catalog; + +create table fixed_char_table ( + i int, + c char(2) +) stored as orc; + +insert into fixed_char_table values(1,'a'),(2,'b '), (3,'cd'); + +create table type_changed_table ( + id int, + name string +) stored as orc; +insert into type_changed_table values (1, 'Alice'), (2, 'Bob'), (3, 'Charlie'); +ALTER TABLE type_changed_table CHANGE COLUMN id id STRING; diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh new file mode 100755 index 00000000000000..f934ff3009c6f2 --- /dev/null +++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -x + +CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" + +# create table +hive -f "${CUR_DIR}"/orc_predicate_table.hql + + diff --git a/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql b/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql index 7aa4170eab0985..5cc0a0ea685e37 100644 --- a/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql +++ b/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql @@ -22,4 +22,37 @@ 
insert into test_tb_mix_format values (1,1,'b'),(2,1,'b'),(3,1,'b'),(4,1,'b'),(5 -- update some data, these splits will be readed by jni insert into test_tb_mix_format values (1,2,'b'),(2,2,'b'),(3,2,'b'),(4,2,'b'),(5,2,'b'); -- delete foramt in table properties, doris should get format by file name -alter table test_tb_mix_format unset TBLPROPERTIES ('file.format'); \ No newline at end of file +alter table test_tb_mix_format unset TBLPROPERTIES ('file.format'); + +drop table if exists two_partition; +CREATE TABLE two_partition ( + id BIGINT, + create_date STRING, + region STRING +) PARTITIONED BY (create_date,region) TBLPROPERTIES ( + 'primary-key' = 'create_date,region,id', + 'bucket'=10, + 'file.format'='orc' +); + +insert into two_partition values(1,'2020-01-01','bj'); +insert into two_partition values(2,'2020-01-01','sh'); +insert into two_partition values(3,'2038-01-01','bj'); +insert into two_partition values(4,'2038-01-01','sh'); +insert into two_partition values(5,'2038-01-02','bj'); + +drop table if exists null_partition; +CREATE TABLE null_partition ( + id BIGINT, + region STRING +) PARTITIONED BY (region) TBLPROPERTIES ( + 'primary-key' = 'region,id', + 'bucket'=10, + 'file.format'='orc' +); +-- null NULL "null" all will be in partition [null] +insert into null_partition values(1,'bj'); +insert into null_partition values(2,null); +insert into null_partition values(3,NULL); +insert into null_partition values(4,'null'); +insert into null_partition values(5,'NULL'); \ No newline at end of file diff --git a/fe/check/checkstyle/checkstyle.xml b/fe/check/checkstyle/checkstyle.xml index 39a1e5c569fd6a..663f17df0f583d 100644 --- a/fe/check/checkstyle/checkstyle.xml +++ b/fe/check/checkstyle/checkstyle.xml @@ -431,6 +431,10 @@ under the License. 
value="WhitespaceAround: ''{0}'' is not preceded with whitespace."/> + + + + diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 97876c231fec69..37e1c68cefb91c 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -870,11 +870,11 @@ supportedUnsetStatement supportedUseStatement : SWITCH catalog=identifier #switchCatalog + | USE (catalog=identifier DOT)? database=identifier #useDatabase ; unsupportedUseStatement - : USE (catalog=identifier DOT)? database=identifier #useDatabase - | USE ((catalog=identifier DOT)? database=identifier)? ATSIGN cluster=identifier #useCloudCluster + : USE ((catalog=identifier DOT)? database=identifier)? ATSIGN cluster=identifier #useCloudCluster ; unsupportedDmlStatement @@ -1476,7 +1476,9 @@ rowConstructor ; rowConstructorItem - : namedExpression | DEFAULT + : constant // duplicate constant rule for improve the parse of `insert into tbl values` + | DEFAULT + | namedExpression ; predicate @@ -1678,7 +1680,7 @@ constant | LEFT_BRACE (items+=constant COLON items+=constant)? 
(COMMA items+=constant COLON items+=constant)* RIGHT_BRACE #mapLiteral | LEFT_BRACE items+=constant (COMMA items+=constant)* RIGHT_BRACE #structLiteral - | PLACEHOLDER #placeholder + | PLACEHOLDER #placeholder ; comparisonOperator diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java index 96bf0097c28a51..cad6ca38130420 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java @@ -65,14 +65,9 @@ public boolean isDefaultPartition() { @Override public PartitionKeyDesc toPartitionKeyDesc() { - if (partitionKeyRange.hasLowerBound()) { - return PartitionKeyDesc.createFixed( + return PartitionKeyDesc.createFixed( PartitionInfo.toPartitionValue(partitionKeyRange.lowerEndpoint()), PartitionInfo.toPartitionValue(partitionKeyRange.upperEndpoint())); - } else { - // For null partition value. - return PartitionKeyDesc.createLessThan(PartitionInfo.toPartitionValue(partitionKeyRange.upperEndpoint())); - } } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java index a6ba294e80934c..b8a098cc891dee 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java +++ b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java @@ -62,6 +62,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; /* * TabletSchedCtx contains all information which is created during tablet scheduler processing. 
@@ -69,28 +70,6 @@ public class TabletSchedCtx implements Comparable { private static final Logger LOG = LogManager.getLogger(TabletSchedCtx.class); - /* - * SCHED_FAILED_COUNTER_THRESHOLD: - * threshold of times a tablet failed to be scheduled - * - * MIN_ADJUST_PRIORITY_INTERVAL_MS: - * min interval time of adjusting a tablet's priority - * - * MAX_NOT_BEING_SCHEDULED_INTERVAL_MS: - * max gap time of a tablet NOT being scheduled. - * - * These 3 params is for adjusting priority. - * If a tablet being scheduled failed for more than SCHED_FAILED_COUNTER_THRESHOLD times, its priority - * will be downgraded. And the interval between adjustment is larger than MIN_ADJUST_PRIORITY_INTERVAL_MS, - * to avoid being downgraded too soon. - * And if a tablet is not being scheduled longer than MAX_NOT_BEING_SCHEDULED_INTERVAL_MS, its priority - * will be upgraded, to avoid starvation. - * - */ - private static final int SCHED_FAILED_COUNTER_THRESHOLD = 5; - private static final long MIN_ADJUST_PRIORITY_INTERVAL_MS = 5 * 60 * 1000L; // 5 min - private static final long MAX_NOT_BEING_SCHEDULED_INTERVAL_MS = 30 * 60 * 1000L; // 30 min - /* * A clone task timeout is between Config.min_clone_task_timeout_sec and Config.max_clone_task_timeout_sec, * estimated by tablet size / MIN_CLONE_SPEED_MB_PER_SECOND. 
@@ -450,10 +429,6 @@ public void setSchedFailedCode(SubCode code) { schedFailedCode = code; } - public CloneTask getCloneTask() { - return cloneTask; - } - public long getCopySize() { return copySize; } @@ -932,12 +907,14 @@ public void releaseResource(TabletScheduler tabletScheduler, boolean reserveTabl } if (cloneTask != null) { AgentTaskQueue.removeTask(cloneTask.getBackendId(), TTaskType.CLONE, cloneTask.getSignature()); + cloneTask = null; // clear all CLONE replicas Database db = Env.getCurrentInternalCatalog().getDbNullable(dbId); if (db != null) { Table table = db.getTableNullable(tblId); - if (table != null && table.writeLockIfExist()) { + // try get table write lock, if failed TabletScheduler will try next time + if (table != null && table.tryWriteLockIfExist(Table.TRY_LOCK_TIMEOUT_MS, TimeUnit.MILLISECONDS)) { try { List cloneReplicas = Lists.newArrayList(); tablet.getReplicas().stream().filter(r -> r.getState() == ReplicaState.CLONE).forEach(r -> { diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java index 1545236aa59cd0..dc07ddb0be4d30 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java +++ b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java @@ -105,9 +105,6 @@ public class TabletScheduler extends MasterDaemon { private static final Logger LOG = LogManager.getLogger(TabletScheduler.class); - // handle at most BATCH_NUM tablets in one loop - private static final int MIN_BATCH_NUM = 50; - // the minimum interval of updating cluster statistics and priority of tablet info private static final long STAT_UPDATE_INTERVAL_MS = 20 * 1000; // 20s @@ -151,7 +148,7 @@ public enum AddResult { ADDED, // success to add ALREADY_IN, // already added, skip LIMIT_EXCEED, // number of pending tablets exceed the limit - REPLACE_ADDED, // succ to add, and envit a lowest task + REPLACE_ADDED, // succ to add, and evict a lowest 
task DISABLED // scheduler has been disabled. } @@ -292,7 +289,7 @@ public synchronized AddResult addTablet(TabletSchedCtx tablet, boolean force) { addResult = AddResult.REPLACE_ADDED; pendingTablets.pollLast(); finalizeTabletCtx(lowestPriorityTablet, TabletSchedCtx.State.CANCELLED, Status.UNRECOVERABLE, - "envit lower priority sched tablet because pending queue is full"); + "evict lower priority sched tablet because pending queue is full"); } if (!contains || tablet.getType() == TabletSchedCtx.Type.REPAIR) { @@ -1868,9 +1865,9 @@ public boolean finishCloneTask(CloneTask cloneTask, TFinishTaskRequest request) tabletCtx.increaseFailedRunningCounter(); if (!tabletCtx.isExceedFailedRunningLimit()) { stat.counterCloneTaskFailed.incrementAndGet(); + tabletCtx.setState(TabletSchedCtx.State.PENDING); tabletCtx.releaseResource(this); tabletCtx.resetFailedSchedCounter(); - tabletCtx.setState(TabletSchedCtx.State.PENDING); addBackToPendingTablets(tabletCtx); return false; } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java b/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java index 6a92e043b6eb20..5b0d5ba353387f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java @@ -600,7 +600,9 @@ public void setQueryDistributedFinishTime() { } public void setQueryPlanFinishTime() { - this.queryPlanFinishTime = TimeUtils.getStartTimeMs(); + if (queryPlanFinishTime == -1) { + this.queryPlanFinishTime = TimeUtils.getStartTimeMs(); + } } public void setQueryScheduleFinishTime() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java index 4a0b9d1ff5950d..00147207c143db 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java @@ -98,16 +98,19 @@ public List fetch() { if (!backend.isAlive()) { continue; } - // need 2 be to provide a retry - if (coordinatorBackend.size() < 2) { - coordinatorBackend.add(backend); - } + coordinatorBackend.add(backend); PTabletsLocation.Builder locationBuilder = PTabletsLocation.newBuilder() .setHost(backend.getHost()) .setBrpcPort(backend.getBrpcPort()); PTabletsLocation location = locationBuilder.addAllTabletId(tabletIds).build(); locations.add(location); } + // pick 2 random coordinator + Collections.shuffle(coordinatorBackend); + if (!coordinatorBackend.isEmpty()) { + coordinatorBackend = coordinatorBackend.subList(0, Math.min(2, coordinatorBackend.size())); + LOG.debug("pick coordinator backend {}", coordinatorBackend.get(0)); + } PFetchRemoteSchemaRequest.Builder requestBuilder = PFetchRemoteSchemaRequest.newBuilder() .addAllTabletLocation(locations) .setIsCoordinator(true); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index 2575169f79207f..d1df51177fd496 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -154,7 +154,7 @@ public abstract class ExternalCatalog protected PreExecutionAuthenticator preExecutionAuthenticator; private volatile Configuration cachedConf = null; - private final byte[] confLock = new byte[0]; + private byte[] confLock = new byte[0]; public ExternalCatalog() { } @@ -784,6 +784,7 @@ public void gsonPostProcess() throws IOException { } } this.propLock = new byte[0]; + this.confLock = new byte[0]; this.initialized = false; setDefaultPropsIfMissing(true); if (tableAutoAnalyzePolicy == null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java index da4670d6d0589d..a6fb486bed9c65 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java @@ -543,7 +543,7 @@ public Optional initSchema() { } private List getIcebergSchema() { - return IcebergUtils.getSchema(catalog, dbName, name); + return IcebergUtils.getSchema(catalog, dbName, name, IcebergUtils.UNKNOWN_SNAPSHOT_ID); } private List getHudiSchema() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java index 486fdea74a00bb..b1eb47095f33c4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java @@ -91,8 +91,6 @@ public class HudiScanNode extends HiveScanNode { private final AtomicLong noLogsSplitNum = new AtomicLong(0); - private final boolean useHiveSyncPartition; - private HoodieTableMetaClient hudiClient; private String basePath; private String inputFormat; @@ -102,7 +100,6 @@ public class HudiScanNode extends HiveScanNode { private boolean partitionInit = false; private HoodieTimeline timeline; - private Option snapshotTimestamp; private String queryInstant; private final AtomicReference batchException = new AtomicReference<>(null); @@ -113,7 +110,6 @@ public class HudiScanNode extends HiveScanNode { private boolean incrementalRead = false; private TableScanParams scanParams; private IncrementalRelation incrementalRelation; - private SessionVariable sessionVariable; /** * External file scan node for Query Hudi table @@ -125,8 +121,8 @@ public class HudiScanNode extends HiveScanNode { */ public HudiScanNode(PlanNodeId id, TupleDescriptor desc, boolean needCheckColumnPriv, Optional scanParams, Optional 
incrementalRelation, - SessionVariable sessionVariable) { - super(id, desc, "HUDI_SCAN_NODE", StatisticalType.HUDI_SCAN_NODE, needCheckColumnPriv, sessionVariable); + SessionVariable sv) { + super(id, desc, "HUDI_SCAN_NODE", StatisticalType.HUDI_SCAN_NODE, needCheckColumnPriv, sv); isCowTable = hmsTable.isHoodieCowTable(); if (LOG.isDebugEnabled()) { if (isCowTable) { @@ -136,11 +132,9 @@ public HudiScanNode(PlanNodeId id, TupleDescriptor desc, boolean needCheckColumn hmsTable.getFullQualifiers()); } } - useHiveSyncPartition = hmsTable.useHiveSyncPartition(); this.scanParams = scanParams.orElse(null); this.incrementalRelation = incrementalRelation.orElse(null); this.incrementalRead = (this.scanParams != null && this.scanParams.incrementalRead()); - this.sessionVariable = sessionVariable; } @Override @@ -215,7 +209,6 @@ protected void doInitialize() throws UserException { throw new UserException("Hudi does not support `FOR VERSION AS OF`, please use `FOR TIME AS OF`"); } queryInstant = tableSnapshot.getTime().replaceAll("[-: ]", ""); - snapshotTimestamp = Option.of(queryInstant); } else { Option snapshotInstant = timeline.lastInstant(); if (!snapshotInstant.isPresent()) { @@ -224,7 +217,6 @@ protected void doInitialize() throws UserException { return; } queryInstant = snapshotInstant.get().getTimestamp(); - snapshotTimestamp = Option.empty(); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java index e259399f63740b..7f7d2fdf578292 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java @@ -27,9 +27,14 @@ import org.apache.doris.catalog.RangePartitionItem; import org.apache.doris.common.AnalysisException; import org.apache.doris.common.DdlException; +import 
org.apache.doris.datasource.CacheException; +import org.apache.doris.datasource.ExternalSchemaCache; +import org.apache.doris.datasource.ExternalSchemaCache.SchemaCacheKey; import org.apache.doris.datasource.ExternalTable; import org.apache.doris.datasource.SchemaCacheValue; import org.apache.doris.datasource.mvcc.MvccSnapshot; +import org.apache.doris.datasource.mvcc.MvccTable; +import org.apache.doris.datasource.mvcc.MvccUtil; import org.apache.doris.mtmv.MTMVBaseTableIf; import org.apache.doris.mtmv.MTMVRefreshContext; import org.apache.doris.mtmv.MTMVRelatedTableIf; @@ -77,7 +82,7 @@ import java.util.Set; import java.util.stream.Collectors; -public class IcebergExternalTable extends ExternalTable implements MTMVRelatedTableIf, MTMVBaseTableIf { +public class IcebergExternalTable extends ExternalTable implements MTMVRelatedTableIf, MTMVBaseTableIf, MvccTable { public static final String YEAR = "year"; public static final String MONTH = "month"; @@ -117,39 +122,23 @@ public void setPartitionColumns(List partitionColumns) { } @Override - public Optional initSchema() { - table = IcebergUtils.getIcebergTable(catalog, dbName, name); - List schema = IcebergUtils.getSchema(catalog, dbName, name); - Snapshot snapshot = table.currentSnapshot(); - if (snapshot == null) { - LOG.debug("Table {} is empty", name); - return Optional.of(new IcebergSchemaCacheValue(schema, null, -1, null)); - } - long snapshotId = snapshot.snapshotId(); - partitionColumns = null; - IcebergPartitionInfo partitionInfo = null; - if (isValidRelatedTable()) { - PartitionSpec spec = table.spec(); - partitionColumns = Lists.newArrayList(); - - // For iceberg table, we only support table with 1 partition column as RelatedTable. - // So we use spec.fields().get(0) to get the partition column. 
- Types.NestedField col = table.schema().findField(spec.fields().get(0).sourceId()); + public Optional initSchema(SchemaCacheKey key) { + table = getIcebergTable(); + List schema = IcebergUtils.getSchema(catalog, dbName, name, + ((IcebergSchemaCacheKey) key).getSchemaId()); + List tmpColumns = Lists.newArrayList(); + PartitionSpec spec = table.spec(); + for (PartitionField field : spec.fields()) { + Types.NestedField col = table.schema().findField(field.sourceId()); for (Column c : schema) { if (c.getName().equalsIgnoreCase(col.name())) { - partitionColumns.add(c); + tmpColumns.add(c); break; } } - Preconditions.checkState(partitionColumns.size() == 1, - "Support 1 partition column for iceberg table, but found " + partitionColumns.size()); - try { - partitionInfo = loadPartitionInfo(); - } catch (AnalysisException e) { - LOG.warn("Failed to load iceberg table {} partition info.", name, e); - } } - return Optional.of(new IcebergSchemaCacheValue(schema, partitionColumns, snapshotId, partitionInfo)); + partitionColumns = tmpColumns; + return Optional.of(new IcebergSchemaCacheValue(schema, partitionColumns)); } @Override @@ -187,6 +176,11 @@ public Table getIcebergTable() { return IcebergUtils.getIcebergTable(getCatalog(), getDbName(), getName()); } + private IcebergSnapshotCacheValue getIcebergSnapshotCacheValue() { + return Env.getCurrentEnv().getExtMetaCacheMgr().getIcebergMetadataCache() + .getSnapshotCache(catalog, dbName, name); + } + @Override public void beforeMTMVRefresh(MTMV mtmv) throws DdlException { Env.getCurrentEnv().getRefreshManager() @@ -195,46 +189,36 @@ public void beforeMTMVRefresh(MTMV mtmv) throws DdlException { @Override public Map getAndCopyPartitionItems(Optional snapshot) { - return Maps.newHashMap(getPartitionInfoFromCache().getNameToPartitionItem()); + return Maps.newHashMap(getOrFetchSnapshotCacheValue(snapshot).getPartitionInfo().getNameToPartitionItem()); } - private IcebergPartitionInfo getPartitionInfoFromCache() { - 
makeSureInitialized(); - Optional schemaCacheValue = getSchemaCacheValue(); - if (!schemaCacheValue.isPresent()) { - return new IcebergPartitionInfo(); - } - return ((IcebergSchemaCacheValue) schemaCacheValue.get()).getPartitionInfo(); + @Override + public Map getNameToPartitionItems(Optional snapshot) { + return getOrFetchSnapshotCacheValue(snapshot).getPartitionInfo().getNameToPartitionItem(); } @Override public PartitionType getPartitionType(Optional snapshot) { - makeSureInitialized(); return isValidRelatedTable() ? PartitionType.RANGE : PartitionType.UNPARTITIONED; } @Override public Set getPartitionColumnNames(Optional snapshot) throws DdlException { - return getPartitionColumnsFromCache().stream().map(Column::getName).collect(Collectors.toSet()); + return getPartitionColumns(snapshot).stream().map(Column::getName).collect(Collectors.toSet()); } @Override public List getPartitionColumns(Optional snapshot) { - return getPartitionColumnsFromCache(); - } - - private List getPartitionColumnsFromCache() { - makeSureInitialized(); - Optional schemaCacheValue = getSchemaCacheValue(); - return schemaCacheValue - .map(cacheValue -> ((IcebergSchemaCacheValue) cacheValue).getPartitionColumns()) - .orElseGet(Lists::newArrayList); + IcebergSnapshotCacheValue snapshotValue = getOrFetchSnapshotCacheValue(snapshot); + IcebergSchemaCacheValue schemaValue = getIcebergSchemaCacheValue(snapshotValue.getSnapshot().getSchemaId()); + return schemaValue.getPartitionColumns(); } @Override public MTMVSnapshotIf getPartitionSnapshot(String partitionName, MTMVRefreshContext context, Optional snapshot) throws AnalysisException { - long latestSnapshotId = getPartitionInfoFromCache().getLatestSnapshotId(partitionName); + IcebergSnapshotCacheValue snapshotValue = getOrFetchSnapshotCacheValue(snapshot); + long latestSnapshotId = snapshotValue.getPartitionInfo().getLatestSnapshotId(partitionName); if (latestSnapshotId <= 0) { throw new AnalysisException("can not find partition: " + 
partitionName); } @@ -244,16 +228,9 @@ public MTMVSnapshotIf getPartitionSnapshot(String partitionName, MTMVRefreshCont @Override public MTMVSnapshotIf getTableSnapshot(MTMVRefreshContext context, Optional snapshot) throws AnalysisException { - return new MTMVVersionSnapshot(getLatestSnapshotIdFromCache()); - } - - public long getLatestSnapshotIdFromCache() throws AnalysisException { makeSureInitialized(); - Optional schemaCacheValue = getSchemaCacheValue(); - if (!schemaCacheValue.isPresent()) { - throw new AnalysisException("Can't find schema cache of table " + name); - } - return ((IcebergSchemaCacheValue) schemaCacheValue.get()).getSnapshotId(); + IcebergSnapshotCacheValue snapshotValue = getOrFetchSnapshotCacheValue(snapshot); + return new MTMVVersionSnapshot(snapshotValue.getSnapshot().getSnapshotId()); } @Override @@ -268,11 +245,13 @@ public boolean isPartitionColumnAllowNull() { */ @Override public boolean isValidRelatedTable() { + makeSureInitialized(); if (isValidRelatedTableCached) { return isValidRelatedTable; } isValidRelatedTable = false; Set allFields = Sets.newHashSet(); + table = getIcebergTable(); for (PartitionSpec spec : table.specs().values()) { if (spec == null) { isValidRelatedTableCached = true; @@ -299,14 +278,62 @@ public boolean isValidRelatedTable() { return isValidRelatedTable; } - protected IcebergPartitionInfo loadPartitionInfo() throws AnalysisException { - List icebergPartitions = loadIcebergPartition(); + @Override + public MvccSnapshot loadSnapshot() { + return new IcebergMvccSnapshot(getIcebergSnapshotCacheValue()); + } + + public long getLatestSnapshotId() { + table = getIcebergTable(); + Snapshot snapshot = table.currentSnapshot(); + return snapshot == null ? IcebergUtils.UNKNOWN_SNAPSHOT_ID : table.currentSnapshot().snapshotId(); + } + + public long getSchemaId(long snapshotId) { + table = getIcebergTable(); + return snapshotId == IcebergUtils.UNKNOWN_SNAPSHOT_ID + ? 
IcebergUtils.UNKNOWN_SNAPSHOT_ID + : table.snapshot(snapshotId).schemaId(); + } + + @Override + public List getFullSchema() { + Optional snapshotFromContext = MvccUtil.getSnapshotFromContext(this); + IcebergSnapshotCacheValue cacheValue = getOrFetchSnapshotCacheValue(snapshotFromContext); + return getIcebergSchemaCacheValue(cacheValue.getSnapshot().getSchemaId()).getSchema(); + } + + @Override + public boolean supportInternalPartitionPruned() { + return true; + } + + public IcebergSchemaCacheValue getIcebergSchemaCacheValue(long schemaId) { + ExternalSchemaCache cache = Env.getCurrentEnv().getExtMetaCacheMgr().getSchemaCache(catalog); + Optional schemaCacheValue = cache.getSchemaValue( + new IcebergSchemaCacheKey(dbName, name, schemaId)); + if (!schemaCacheValue.isPresent()) { + throw new CacheException("failed to getSchema for: %s.%s.%s.%s", + null, catalog.getName(), dbName, name, schemaId); + } + return (IcebergSchemaCacheValue) schemaCacheValue.get(); + } + + public IcebergPartitionInfo loadPartitionInfo(long snapshotId) throws AnalysisException { + // snapshotId == UNKNOWN_SNAPSHOT_ID means this is an empty table, haven't contained any snapshot yet. 
+ if (!isValidRelatedTable() || snapshotId == IcebergUtils.UNKNOWN_SNAPSHOT_ID) { + return new IcebergPartitionInfo(); + } + List icebergPartitions = loadIcebergPartition(snapshotId); Map nameToPartition = Maps.newHashMap(); Map nameToPartitionItem = Maps.newHashMap(); + table = getIcebergTable(); + partitionColumns = getIcebergSchemaCacheValue(table.snapshot(snapshotId).schemaId()).getPartitionColumns(); for (IcebergPartition partition : icebergPartitions) { nameToPartition.put(partition.getPartitionName(), partition); String transform = table.specs().get(partition.getSpecId()).fields().get(0).transform().toString(); - Range partitionRange = getPartitionRange(partition.getPartitionValues().get(0), transform); + Range partitionRange = getPartitionRange( + partition.getPartitionValues().get(0), transform, partitionColumns); PartitionItem item = new RangePartitionItem(partitionRange); nameToPartitionItem.put(partition.getPartitionName(), item); } @@ -314,11 +341,11 @@ protected IcebergPartitionInfo loadPartitionInfo() throws AnalysisException { return new IcebergPartitionInfo(nameToPartitionItem, nameToPartition, partitionNameMap); } - public List loadIcebergPartition() { + public List loadIcebergPartition(long snapshotId) { PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils .createMetadataTableInstance(table, MetadataTableType.PARTITIONS); List partitions = Lists.newArrayList(); - try (CloseableIterable tasks = partitionsTable.newScan().planFiles()) { + try (CloseableIterable tasks = partitionsTable.newScan().useSnapshot(snapshotId).planFiles()) { for (FileScanTask task : tasks) { CloseableIterable rows = task.asDataTask().rows(); for (StructLike row : rows) { @@ -344,6 +371,7 @@ public IcebergPartition generateIcebergPartition(StructLike row) { // 8. equality_delete_file_count, // 9. last_updated_at, // 10. 
last_updated_snapshot_id + table = getIcebergTable(); Preconditions.checkState(!table.spec().fields().isEmpty(), table.name() + " is not a partition table."); int specId = row.get(1, Integer.class); PartitionSpec partitionSpec = table.specs().get(specId); @@ -382,13 +410,14 @@ public IcebergPartition generateIcebergPartition(StructLike row) { } @VisibleForTesting - public Range getPartitionRange(String value, String transform) + public Range getPartitionRange(String value, String transform, List partitionColumns) throws AnalysisException { - // For NULL value, create a lessThan partition for it. + // For NULL value, create a minimum partition for it. if (value == null) { - PartitionKey nullKey = PartitionKey.createPartitionKey( - Lists.newArrayList(new PartitionValue("0000-01-02")), partitionColumns); - return Range.lessThan(nullKey); + PartitionKey nullLowKey = PartitionKey.createPartitionKey( + Lists.newArrayList(new PartitionValue("0000-01-01")), partitionColumns); + PartitionKey nullUpKey = nullLowKey.successor(); + return Range.closedOpen(nullLowKey, nullUpKey); } LocalDateTime epoch = Instant.EPOCH.atZone(ZoneId.of("UTC")).toLocalDateTime(); LocalDateTime target; @@ -525,4 +554,12 @@ public boolean validRelatedTableCache() { public void setIsValidRelatedTableCached(boolean isCached) { this.isValidRelatedTableCached = isCached; } + + private IcebergSnapshotCacheValue getOrFetchSnapshotCacheValue(Optional snapshot) { + if (snapshot.isPresent()) { + return ((IcebergMvccSnapshot) snapshot.get()).getSnapshotCacheValue(); + } else { + return getIcebergSnapshotCacheValue(); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java index ad347ca78f2a4f..e80a013cc92195 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java @@ -18,6 +18,7 @@ package org.apache.doris.datasource.iceberg; import org.apache.doris.catalog.Env; +import org.apache.doris.common.AnalysisException; import org.apache.doris.common.CacheFactory; import org.apache.doris.common.Config; import org.apache.doris.common.UserException; @@ -49,6 +50,7 @@ public class IcebergMetadataCache { private final LoadingCache> snapshotListCache; private final LoadingCache tableCache; + private final LoadingCache snapshotCache; public IcebergMetadataCache(ExecutorService executor) { CacheFactory snapshotListCacheFactory = new CacheFactory( @@ -66,6 +68,14 @@ public IcebergMetadataCache(ExecutorService executor) { true, null); this.tableCache = tableCacheFactory.buildCache(key -> loadTable(key), null, executor); + + CacheFactory snapshotCacheFactory = new CacheFactory( + OptionalLong.of(28800L), + OptionalLong.of(Config.external_cache_expire_time_minutes_after_access * 60), + Config.max_external_table_cache_num, + true, + null); + this.snapshotCache = snapshotCacheFactory.buildCache(key -> loadSnapshot(key), null, executor); } public List getSnapshotList(TIcebergMetadataParams params) throws UserException { @@ -92,6 +102,11 @@ public Table getAndCloneTable(CatalogIf catalog, String dbName, String tbName) { return restTable; } + public IcebergSnapshotCacheValue getSnapshotCache(CatalogIf catalog, String dbName, String tbName) { + IcebergMetadataCacheKey key = IcebergMetadataCacheKey.of(catalog, dbName, tbName); + return snapshotCache.get(key); + } + @NotNull private List loadSnapshots(IcebergMetadataCacheKey key) { Table icebergTable = getIcebergTable(key.catalog, key.dbName, key.tableName); @@ -114,6 +129,16 @@ private Table loadTable(IcebergMetadataCacheKey key) { () -> ops.loadTable(key.dbName, key.tableName)); } + @NotNull + private IcebergSnapshotCacheValue loadSnapshot(IcebergMetadataCacheKey key) throws AnalysisException { + 
IcebergExternalTable table = (IcebergExternalTable) key.catalog.getDbOrAnalysisException(key.dbName) + .getTableOrAnalysisException(key.tableName); + long snapshotId = table.getLatestSnapshotId(); + long schemaId = table.getSchemaId(snapshotId); + IcebergPartitionInfo icebergPartitionInfo = table.loadPartitionInfo(snapshotId); + return new IcebergSnapshotCacheValue(icebergPartitionInfo, new IcebergSnapshot(snapshotId, schemaId)); + } + public void invalidateCatalogCache(long catalogId) { snapshotListCache.asMap().keySet().stream() .filter(key -> key.catalog.getId() == catalogId) @@ -125,6 +150,10 @@ public void invalidateCatalogCache(long catalogId) { ManifestFiles.dropCache(entry.getValue().io()); tableCache.invalidate(entry.getKey()); }); + + snapshotCache.asMap().keySet().stream() + .filter(key -> key.catalog.getId() == catalogId) + .forEach(snapshotCache::invalidate); } public void invalidateTableCache(long catalogId, String dbName, String tblName) { @@ -143,6 +172,11 @@ public void invalidateTableCache(long catalogId, String dbName, String tblName) ManifestFiles.dropCache(entry.getValue().io()); tableCache.invalidate(entry.getKey()); }); + + snapshotCache.asMap().keySet().stream() + .filter(key -> key.catalog.getId() == catalogId && key.dbName.equals(dbName) && key.tableName.equals( + tblName)) + .forEach(snapshotCache::invalidate); } public void invalidateDbCache(long catalogId, String dbName) { @@ -159,6 +193,10 @@ public void invalidateDbCache(long catalogId, String dbName) { ManifestFiles.dropCache(entry.getValue().io()); tableCache.invalidate(entry.getKey()); }); + + snapshotCache.asMap().keySet().stream() + .filter(key -> key.catalog.getId() == catalogId && key.dbName.equals(dbName)) + .forEach(snapshotCache::invalidate); } private static void initIcebergTableFileIO(Table table, Map props) { @@ -212,10 +250,12 @@ public int hashCode() { public Map> getCacheStats() { Map> res = Maps.newHashMap(); - res.put("iceberg_snapshot_cache", 
ExternalMetaCacheMgr.getCacheStats(snapshotListCache.stats(), + res.put("iceberg_snapshot_list_cache", ExternalMetaCacheMgr.getCacheStats(snapshotListCache.stats(), snapshotListCache.estimatedSize())); res.put("iceberg_table_cache", ExternalMetaCacheMgr.getCacheStats(tableCache.stats(), tableCache.estimatedSize())); + res.put("iceberg_snapshot_cache", ExternalMetaCacheMgr.getCacheStats(snapshotCache.stats(), + snapshotCache.estimatedSize())); return res; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java new file mode 100644 index 00000000000000..2c0155a71cd389 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java @@ -0,0 +1,32 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +import org.apache.doris.datasource.mvcc.MvccSnapshot; + +public class IcebergMvccSnapshot implements MvccSnapshot { + private final IcebergSnapshotCacheValue snapshotCacheValue; + + public IcebergMvccSnapshot(IcebergSnapshotCacheValue snapshotCacheValue) { + this.snapshotCacheValue = snapshotCacheValue; + } + + public IcebergSnapshotCacheValue getSnapshotCacheValue() { + return snapshotCacheValue; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java new file mode 100644 index 00000000000000..7931d91831fcec --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java @@ -0,0 +1,55 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +import org.apache.doris.datasource.ExternalSchemaCache.SchemaCacheKey; + +import com.google.common.base.Objects; + +public class IcebergSchemaCacheKey extends SchemaCacheKey { + private final long schemaId; + + public IcebergSchemaCacheKey(String dbName, String tableName, long schemaId) { + super(dbName, tableName); + this.schemaId = schemaId; + } + + public long getSchemaId() { + return schemaId; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof IcebergSchemaCacheKey)) { + return false; + } + if (!super.equals(o)) { + return false; + } + IcebergSchemaCacheKey that = (IcebergSchemaCacheKey) o; + return schemaId == that.schemaId; + } + + @Override + public int hashCode() { + return Objects.hashCode(super.hashCode(), schemaId); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java index e1fde8049fe1ad..ccfcaab0c7261d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java @@ -25,26 +25,13 @@ public class IcebergSchemaCacheValue extends SchemaCacheValue { private final List partitionColumns; - private final IcebergPartitionInfo partitionInfo; - private final long snapshotId; - public IcebergSchemaCacheValue(List schema, List partitionColumns, - long snapshotId, IcebergPartitionInfo partitionInfo) { + public IcebergSchemaCacheValue(List schema, List partitionColumns) { super(schema); this.partitionColumns = partitionColumns; - this.snapshotId = snapshotId; - this.partitionInfo = partitionInfo; } public List getPartitionColumns() { return partitionColumns; } - - public IcebergPartitionInfo getPartitionInfo() { - return partitionInfo; - } - - public long getSnapshotId() { - 
return snapshotId; - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java new file mode 100644 index 00000000000000..5903c362d7434e --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java @@ -0,0 +1,36 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +public class IcebergSnapshot { + private final long snapshotId; + private final long schemaId; + + public IcebergSnapshot(long snapshotId, long schemaId) { + this.snapshotId = snapshotId; + this.schemaId = schemaId; + } + + public long getSnapshotId() { + return snapshotId; + } + + public long getSchemaId() { + return schemaId; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java new file mode 100644 index 00000000000000..95c9a6f26cc5c5 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java @@ -0,0 +1,37 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +public class IcebergSnapshotCacheValue { + + private final IcebergPartitionInfo partitionInfo; + private final IcebergSnapshot snapshot; + + public IcebergSnapshotCacheValue(IcebergPartitionInfo partitionInfo, IcebergSnapshot snapshot) { + this.partitionInfo = partitionInfo; + this.snapshot = snapshot; + } + + public IcebergPartitionInfo getPartitionInfo() { + return partitionInfo; + } + + public IcebergSnapshot getSnapshot() { + return snapshot; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java index ba6d628e492c20..a7507fe031ff68 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java @@ -52,6 +52,7 @@ import org.apache.doris.nereids.exceptions.NotSupportedException; import org.apache.doris.thrift.TExprOpcode; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.FileFormat; @@ -107,6 +108,8 @@ public Integer initialValue() { // nickname in spark public static final String SPARK_SQL_COMPRESSION_CODEC = "spark.sql.iceberg.compression-codec"; + public static final long UNKNOWN_SNAPSHOT_ID = -1; + public static Expression convertToIcebergExpr(Expr expr, Schema schema) { if (expr == null) { return null; @@ -573,10 +576,17 @@ private static org.apache.iceberg.Table getIcebergTableInternal(ExternalCatalog /** * Get iceberg schema from catalog and convert them to doris schema */ - public static List getSchema(ExternalCatalog catalog, String dbName, String name) { + public static List getSchema(ExternalCatalog catalog, String dbName, String name, long schemaId) { return HiveMetaStoreClientHelper.ugiDoAs(catalog.getConfiguration(), () -> { org.apache.iceberg.Table 
icebergTable = getIcebergTable(catalog, dbName, name); - Schema schema = icebergTable.schema(); + Schema schema; + if (schemaId == UNKNOWN_SNAPSHOT_ID || icebergTable.currentSnapshot() == null) { + schema = icebergTable.schema(); + } else { + schema = icebergTable.schemas().get((int) schemaId); + } + Preconditions.checkNotNull(schema, + "Schema for table " + catalog.getName() + "." + dbName + "." + name + " is null"); List columns = schema.columns(); List tmpSchema = Lists.newArrayListWithCapacity(columns.size()); for (Types.NestedField field : columns) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java index 27f9b8086a9cef..007ad864da3af8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java @@ -108,7 +108,8 @@ public Optional initSchema() { qualifiedTable.asSchemaTableName(), Optional.empty(), Optional.empty())); } if (!connectorTableHandle.isPresent()) { - throw new RuntimeException(String.format("Table does not exist: %s.%s.%s", qualifiedTable)); + throw new RuntimeException(String.format("Table does not exist: %s.%s.%s", trinoConnectorCatalog.getName(), + dbName, name)); } // 4. 
Get ColumnHandle diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java index 258704763909f1..4f81dde82d97f2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java @@ -473,9 +473,16 @@ public void setCurrentRootRewriteJobContext(RootRewriteJobContext currentRootRew this.currentRootRewriteJobContext = Optional.ofNullable(currentRootRewriteJobContext); } + /** showPlanProcess */ public boolean showPlanProcess() { Boolean show = showPlanProcess.get(); - return show != null && show; + if (show != null && show) { + return true; + } + if (parent.isPresent()) { + return parent.get().showPlanProcess(); + } + return false; } /** set showPlanProcess in task scope */ diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java index 4eafa0e2172f96..b6b09348046cea 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java @@ -98,17 +98,19 @@ */ public class NereidsPlanner extends Planner { public static final Logger LOG = LogManager.getLogger(NereidsPlanner.class); + + protected Plan parsedPlan; + protected Plan analyzedPlan; + protected Plan rewrittenPlan; + protected Plan optimizedPlan; + protected PhysicalPlan physicalPlan; + private CascadesContext cascadesContext; private final StatementContext statementContext; private final List scanNodeList = Lists.newArrayList(); private final List physicalRelations = Lists.newArrayList(); private DescriptorTable descTable; - private Plan parsedPlan; - private Plan analyzedPlan; - private Plan rewrittenPlan; - private Plan optimizedPlan; - private PhysicalPlan physicalPlan; private FragmentIdMapping distributedPlans; // The cost of 
optimized plan private double cost = 0; @@ -552,7 +554,7 @@ public Group getRoot() { return cascadesContext.getMemo().getRoot(); } - private PhysicalPlan chooseNthPlan(Group rootGroup, PhysicalProperties physicalProperties, int nthPlan) { + protected PhysicalPlan chooseNthPlan(Group rootGroup, PhysicalProperties physicalProperties, int nthPlan) { if (nthPlan <= 1) { cost = rootGroup.getLowestCostPlan(physicalProperties).orElseThrow( () -> new AnalysisException("lowestCostPlans with physicalProperties(" @@ -605,6 +607,9 @@ private PhysicalPlan chooseBestPlan(Group rootGroup, PhysicalProperties physical } private long getGarbageCollectionTime() { + if (!ConnectContext.get().getSessionVariable().enableProfile()) { + return 0; + } List gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans(); long initialGCTime = 0; for (GarbageCollectorMXBean gcBean : gcMxBeans) { @@ -708,6 +713,9 @@ public String getExplainString(ExplainOptions explainOptions) { + "========== OPTIMIZED PLAN " + getTimeMetricString(SummaryProfile::getPrettyNereidsOptimizeTime) + " ==========\n" + optimizedPlan.treeString() + "\n\n"; + if (cascadesContext != null && cascadesContext.getMemo() != null) { + plan += "========== MEMO " + cascadesContext.getMemo().toString() + "\n\n"; + } if (distributedPlans != null && !distributedPlans.isEmpty()) { plan += "========== DISTRIBUTED PLAN " @@ -881,7 +889,7 @@ private boolean showPlanProcess(ExplainOptions explainOptions) { return explainOptions != null && explainOptions.showPlanProcess(); } - private void keepOrShowPlanProcess(boolean showPlanProcess, Runnable task) { + protected void keepOrShowPlanProcess(boolean showPlanProcess, Runnable task) { if (showPlanProcess) { cascadesContext.withPlanProcess(showPlanProcess, task); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java index c7a020fd2abddf..2e9e84195508b1 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java @@ -77,7 +77,7 @@ public boolean nullable() { } @Override - public String toSql() { + public String computeToSql() { return slot.toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java index 2be2130aba71d4..25d40dd5981194 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java @@ -59,7 +59,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("(" + child() + ")"); alias.ifPresent(name -> stringBuilder.append(" AS " + name)); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java index a53917f08cd8d0..b4b21e40dcd4df 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java @@ -115,7 +115,7 @@ public List getArguments() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { String params = children.stream() .map(Expression::toSql) .collect(Collectors.joining(", ")); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java new file mode 100644 index 00000000000000..42d637d676fae2 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java @@ -0,0 +1,87 @@ +// 
Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.analyzer; + +import org.apache.doris.nereids.exceptions.UnboundException; +import org.apache.doris.nereids.memo.GroupExpression; +import org.apache.doris.nereids.properties.LogicalProperties; +import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.NamedExpression; +import org.apache.doris.nereids.trees.expressions.Slot; +import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; +import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; +import org.apache.doris.nereids.trees.plans.logical.LogicalLeaf; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.nereids.util.Utils; + +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** UnboundInlineTable */ +public class UnboundInlineTable extends LogicalLeaf implements InlineTable, BlockFuncDepsPropagation, UnboundPlan { + private final List> constantExprsList; + + public UnboundInlineTable(List> 
constantExprsList) { + super(PlanType.LOGICAL_UNBOUND_INLINE_TABLE, Optional.empty(), Optional.empty()); + this.constantExprsList = Utils.fastToImmutableList( + Objects.requireNonNull(constantExprsList, "constantExprsList can not be null") + ); + } + + public List> getConstantExprsList() { + return constantExprsList; + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitUnboundInlineTable(this, context); + } + + @Override + public List getExpressions() { + ImmutableList.Builder expressions = ImmutableList.builderWithExpectedSize( + constantExprsList.size() * constantExprsList.get(0).size()); + + for (List namedExpressions : constantExprsList) { + expressions.addAll(namedExpressions); + } + + return expressions.build(); + } + + @Override + public Plan withGroupExpression(Optional groupExpression) { + return this; + } + + @Override + public Plan withGroupExprLogicalPropChildren(Optional groupExpression, + Optional logicalProperties, List children) { + return this; + } + + @Override + public List computeOutput() { + throw new UnboundException("output"); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java new file mode 100644 index 00000000000000..2b743f958aaa02 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java @@ -0,0 +1,39 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.analyzer; + +import org.apache.doris.nereids.exceptions.UnboundException; +import org.apache.doris.nereids.properties.LogicalProperties; +import org.apache.doris.nereids.properties.UnboundLogicalProperties; +import org.apache.doris.nereids.trees.expressions.Slot; +import org.apache.doris.nereids.trees.plans.Plan; + +import java.util.List; + +/** UnboundPlan */ +public interface UnboundPlan extends Plan { + @Override + default LogicalProperties computeLogicalProperties() { + return UnboundLogicalProperties.INSTANCE; + } + + @Override + default List computeOutput() { + throw new UnboundException("output"); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java index f85812569804aa..fdcb9547837686 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java @@ -81,8 +81,18 @@ public String getInternalName() { } @Override - public String toSql() { - return nameParts.stream().map(Utils::quoteIfNeeded).reduce((left, right) -> left + "." + right).orElse(""); + public String computeToSql() { + switch (nameParts.size()) { + case 1: return Utils.quoteIfNeeded(nameParts.get(0)); + case 2: return Utils.quoteIfNeeded(nameParts.get(0)) + "." + Utils.quoteIfNeeded(nameParts.get(1)); + case 3: return Utils.quoteIfNeeded(nameParts.get(0)) + "." + Utils.quoteIfNeeded(nameParts.get(1)) + + "." 
+ Utils.quoteIfNeeded(nameParts.get(2)); + default: { + return nameParts.stream().map(Utils::quoteIfNeeded) + .reduce((left, right) -> left + "." + right) + .orElse(""); + } + } } public static UnboundSlot quoted(String name) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java index 6d8ed904ec109d..cee6a0105f87c7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java @@ -98,7 +98,7 @@ public UnboundStar(List qualifier, List exceptedSlots, } @Override - public String toSql() { + public String computeToSql() { StringBuilder builder = new StringBuilder(); builder.append(Utils.qualifiedName(qualifier, "*")); if (!exceptedSlots.isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java index 0e528227dc9742..8cf32648d55f05 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java @@ -34,6 +34,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import org.apache.commons.lang3.StringUtils; import java.util.List; import java.util.Objects; @@ -176,4 +177,12 @@ public LogicalProperties computeLogicalProperties() { public List computeOutput() { throw new UnboundException("output"); } + + @Override + public String toString() { + return Utils.toSqlString("UnboundTableSink", + "nameParts", StringUtils.join(nameParts, "."), + "colNames", colNames, + "hints", hints); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java index f0fa59977a1902..85243c4b545420 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java @@ -207,6 +207,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @@ -2058,17 +2059,22 @@ public PlanFragment visitPhysicalSetOperation( } setOperationNode.setNereidsId(setOperation.getId()); - setOperation.getRegularChildrenOutputs().stream() - .map(o -> o.stream() - .map(e -> ExpressionTranslator.translate(e, context)) - .collect(ImmutableList.toImmutableList())) - .forEach(setOperationNode::addResultExprLists); + for (List regularChildrenOutput : setOperation.getRegularChildrenOutputs()) { + Builder translateOutputs = ImmutableList.builderWithExpectedSize(regularChildrenOutput.size()); + for (SlotReference childOutput : regularChildrenOutput) { + translateOutputs.add(ExpressionTranslator.translate(childOutput, context)); + } + setOperationNode.addResultExprLists(translateOutputs.build()); + } + if (setOperation instanceof PhysicalUnion) { - ((PhysicalUnion) setOperation).getConstantExprsList().stream() - .map(l -> l.stream() - .map(e -> ExpressionTranslator.translate(e, context)) - .collect(ImmutableList.toImmutableList())) - .forEach(setOperationNode::addConstExprList); + for (List unionConsts : ((PhysicalUnion) setOperation).getConstantExprsList()) { + Builder translateConsts = ImmutableList.builderWithExpectedSize(unionConsts.size()); + for (NamedExpression unionConst : unionConsts) { + translateConsts.add(ExpressionTranslator.translate(unionConst, context)); + } + 
setOperationNode.addConstExprList(translateConsts.build()); + } } for (PlanFragment childFragment : childrenFragments) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java index e31c92e3fe004c..1354f895a3c07a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java @@ -34,11 +34,11 @@ public void executeJobPool(ScheduleContext scheduleContext) { CascadesContext context = (CascadesContext) scheduleContext; SessionVariable sessionVariable = context.getConnectContext().getSessionVariable(); while (!pool.isEmpty()) { + long elapsedS = context.getStatementContext().getStopwatch().elapsed(TimeUnit.MILLISECONDS) / 1000; if (sessionVariable.enableNereidsTimeout - && context.getStatementContext().getStopwatch().elapsed(TimeUnit.MILLISECONDS) - > sessionVariable.nereidsTimeoutSecond * 1000L) { - throw new AnalysisException( - "Nereids cost too much time ( > " + sessionVariable.nereidsTimeoutSecond + "s )"); + && elapsedS > sessionVariable.nereidsTimeoutSecond) { + throw new AnalysisException(String.format("Nereids cost too much time ( %ds > %ds", + elapsedS, sessionVariable.nereidsTimeoutSecond)); } Job job = pool.pop(); job.execute(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index bb344e1b376deb..d98d0660f5c9cb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -324,6 +324,7 @@ import org.apache.doris.nereids.DorisParser.UpdateAssignmentContext; import org.apache.doris.nereids.DorisParser.UpdateAssignmentSeqContext; import 
org.apache.doris.nereids.DorisParser.UpdateContext; +import org.apache.doris.nereids.DorisParser.UseDatabaseContext; import org.apache.doris.nereids.DorisParser.UserIdentifyContext; import org.apache.doris.nereids.DorisParser.UserVariableContext; import org.apache.doris.nereids.DorisParser.WhereClauseContext; @@ -334,7 +335,7 @@ import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundAlias; import org.apache.doris.nereids.analyzer.UnboundFunction; -import org.apache.doris.nereids.analyzer.UnboundOneRowRelation; +import org.apache.doris.nereids.analyzer.UnboundInlineTable; import org.apache.doris.nereids.analyzer.UnboundRelation; import org.apache.doris.nereids.analyzer.UnboundResultSink; import org.apache.doris.nereids.analyzer.UnboundSlot; @@ -356,6 +357,7 @@ import org.apache.doris.nereids.properties.SelectHintUseMv; import org.apache.doris.nereids.trees.TableSample; import org.apache.doris.nereids.trees.expressions.Add; +import org.apache.doris.nereids.trees.expressions.Alias; import org.apache.doris.nereids.trees.expressions.And; import org.apache.doris.nereids.trees.expressions.BitAnd; import org.apache.doris.nereids.trees.expressions.BitNot; @@ -488,6 +490,8 @@ import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.PlanType; import org.apache.doris.nereids.trees.plans.algebra.Aggregate; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; +import org.apache.doris.nereids.trees.plans.algebra.OneRowRelation; import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; import org.apache.doris.nereids.trees.plans.commands.AddConstraintCommand; import org.apache.doris.nereids.trees.plans.commands.AdminCancelRebalanceDiskCommand; @@ -683,6 +687,7 @@ import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshDatabaseCommand; import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshTableCommand; import 
org.apache.doris.nereids.trees.plans.commands.use.SwitchCommand; +import org.apache.doris.nereids.trees.plans.commands.use.UseCommand; import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate; import org.apache.doris.nereids.trees.plans.logical.LogicalCTE; import org.apache.doris.nereids.trees.plans.logical.LogicalExcept; @@ -690,10 +695,10 @@ import org.apache.doris.nereids.trees.plans.logical.LogicalFilter; import org.apache.doris.nereids.trees.plans.logical.LogicalGenerate; import org.apache.doris.nereids.trees.plans.logical.LogicalHaving; -import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalIntersect; import org.apache.doris.nereids.trees.plans.logical.LogicalJoin; import org.apache.doris.nereids.trees.plans.logical.LogicalLimit; +import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.LogicalProject; import org.apache.doris.nereids.trees.plans.logical.LogicalQualify; @@ -885,7 +890,7 @@ public LogicalPlan visitInsertTable(InsertTableContext ctx) { } else { throw new ParseException("tableName and tableId cannot both be null"); } - Optional labelName = ctx.labelName == null ? Optional.empty() : Optional.of(ctx.labelName.getText()); + Optional labelName = (ctx.labelName == null) ? Optional.empty() : Optional.of(ctx.labelName.getText()); List colNames = ctx.cols == null ? 
ImmutableList.of() : visitIdentifierList(ctx.cols); // TODO visit partitionSpecCtx LogicalPlan plan = visitQuery(ctx.query()); @@ -916,7 +921,7 @@ public LogicalPlan visitInsertTable(InsertTableContext ctx) { command = new InsertOverwriteTableCommand(sink, labelName, cte); } else { if (ConnectContext.get() != null && ConnectContext.get().isTxnModel() - && sink.child() instanceof LogicalInlineTable + && sink.child() instanceof InlineTable && sink.child().getExpressions().stream().allMatch(Expression::isConstant)) { // FIXME: In legacy, the `insert into select 1` is handled as `insert into values`. // In nereids, the original way is throw an AnalysisException and fallback to legacy. @@ -1846,8 +1851,8 @@ public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationConte LogicalPlan selectPlan; LogicalPlan relation; if (ctx.fromClause() == null) { - relation = new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), - ImmutableList.of(new UnboundAlias(Literal.of(0)))); + relation = new LogicalOneRowRelation(StatementScopeIdGenerator.newRelationId(), + ImmutableList.of(new Alias(Literal.of(0)))); } else { relation = visitFromClause(ctx.fromClause()); } @@ -1877,10 +1882,13 @@ public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationConte @Override public LogicalPlan visitInlineTable(InlineTableContext ctx) { - List> values = ctx.rowConstructor().stream() - .map(this::visitRowConstructor) - .collect(ImmutableList.toImmutableList()); - return new LogicalInlineTable(values); + List rowConstructorContexts = ctx.rowConstructor(); + ImmutableList.Builder> rows + = ImmutableList.builderWithExpectedSize(rowConstructorContexts.size()); + for (RowConstructorContext rowConstructorContext : rowConstructorContexts) { + rows.add(visitRowConstructor(rowConstructorContext)); + } + return new UnboundInlineTable(rows.build()); } /** @@ -2047,18 +2055,22 @@ public Expression visitStar(StarContext ctx) { throw new ParseException("only one 
replace clause is supported", ctx); } ReplaceContext replaceContext = (ReplaceContext) exceptOrReplace; - List expectAlias = getNamedExpressions(replaceContext.namedExpressionSeq()); - boolean allAlias = expectAlias.stream() - .allMatch(e -> e instanceof UnboundAlias - && ((UnboundAlias) e).getAlias().isPresent()); - if (expectAlias.isEmpty() || !allAlias) { - throw new ParseException( - "only alias is supported in select-replace clause", ctx); + List expectAlias = Lists.newArrayList(); + NamedExpressionSeqContext namedExpressions = replaceContext.namedExpressionSeq(); + for (NamedExpressionContext namedExpressionContext : namedExpressions.namedExpression()) { + if (namedExpressionContext.identifierOrText() == null) { + throw new ParseException("only alias is supported in select-replace clause", ctx); + } + expectAlias.add((NamedExpression) namedExpressionContext.accept(this)); + } + if (expectAlias.isEmpty()) { + throw new ParseException("only alias is supported in select-replace clause", ctx); } finalReplacedAlias = expectAlias; } else { - throw new ParseException("Unsupported except or replace clause: " + exceptOrReplace.getText(), - ctx); + throw new ParseException( + "Unsupported except or replace clause: " + exceptOrReplace.getText(), ctx + ); } } return new UnboundStar(target, finalExpectSlots, finalReplacedAlias); @@ -2079,11 +2091,16 @@ public NamedExpression visitNamedExpression(NamedExpressionContext ctx) { if (ctx.identifierOrText() == null) { if (expression instanceof NamedExpression) { return (NamedExpression) expression; + } else if (expression instanceof Literal) { + return new Alias(expression); } else { return new UnboundAlias(expression); } } String alias = visitIdentifierOrText(ctx.identifierOrText()); + if (expression instanceof Literal) { + return new Alias(expression, alias); + } return new UnboundAlias(expression, alias); }); } @@ -3011,14 +3028,21 @@ public Expression visitParenthesizedExpression(ParenthesizedExpressionContext ct @Override 
public List visitRowConstructor(RowConstructorContext ctx) { - return ctx.rowConstructorItem().stream() - .map(this::visitRowConstructorItem) - .collect(ImmutableList.toImmutableList()); + List rowConstructorItemContexts = ctx.rowConstructorItem(); + ImmutableList.Builder columns + = ImmutableList.builderWithExpectedSize(rowConstructorItemContexts.size()); + for (RowConstructorItemContext rowConstructorItemContext : rowConstructorItemContexts) { + columns.add(visitRowConstructorItem(rowConstructorItemContext)); + } + return columns.build(); } @Override public NamedExpression visitRowConstructorItem(RowConstructorItemContext ctx) { - if (ctx.DEFAULT() != null) { + ConstantContext constant = ctx.constant(); + if (constant != null) { + return new Alias((Expression) constant.accept(this)); + } else if (ctx.DEFAULT() != null) { return new DefaultValueSlot(); } else { return visitNamedExpression(ctx.namedExpression()); @@ -3563,14 +3587,6 @@ private LogicalPlan withLimit(LogicalPlan input, Optional li }); } - private UnboundOneRowRelation withOneRowRelation(SelectColumnClauseContext selectCtx) { - return ParserUtils.withOrigin(selectCtx, () -> { - // fromClause does not exists. - List projects = getNamedExpressions(selectCtx.namedExpressionSeq()); - return new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), projects); - }); - } - /** * Add a regular (SELECT) query specification to a logical plan. 
The query specification * is the core of the logical plan, this is where sourcing (FROM clause), projection (SELECT), @@ -3828,7 +3844,7 @@ protected LogicalPlan withProjection(LogicalPlan input, SelectColumnClauseContex } } else { List projects = getNamedExpressions(selectCtx.namedExpressionSeq()); - if (input instanceof UnboundOneRowRelation) { + if (input instanceof OneRowRelation) { if (projects.stream().anyMatch(project -> project instanceof UnboundStar)) { throw new ParseException("SELECT * must have a FROM clause"); } @@ -5178,12 +5194,20 @@ public LogicalPlan visitShowQueryProfile(ShowQueryProfileContext ctx) { } @Override - public Object visitSwitchCatalog(SwitchCatalogContext ctx) { - String catalogName = ctx.catalog.getText(); - if (catalogName != null) { - return new SwitchCommand(catalogName); + public LogicalPlan visitSwitchCatalog(SwitchCatalogContext ctx) { + if (ctx.catalog != null) { + return new SwitchCommand(ctx.catalog.getText()); } - throw new AnalysisException("catalog name can not be null"); + throw new ParseException("catalog name can not be null"); + } + + @Override + public LogicalPlan visitUseDatabase(UseDatabaseContext ctx) { + if (ctx.database == null) { + throw new ParseException("database name can not be null"); + } + return ctx.catalog != null ? 
new UseCommand(ctx.catalog.getText(), ctx.database.getText()) + : new UseCommand(ctx.database.getText()); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java index 34646c1d657953..c273f50b04ac44 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java @@ -23,7 +23,9 @@ import org.apache.doris.common.Pair; import org.apache.doris.nereids.DorisLexer; import org.apache.doris.nereids.DorisParser; +import org.apache.doris.nereids.DorisParser.NonReservedContext; import org.apache.doris.nereids.StatementContext; +import org.apache.doris.nereids.analyzer.UnboundSlot; import org.apache.doris.nereids.glue.LogicalPlanAdapter; import org.apache.doris.nereids.parser.plsql.PLSqlLogicalPlanBuilder; import org.apache.doris.nereids.trees.expressions.Expression; @@ -35,6 +37,8 @@ import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.SessionVariable; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.antlr.v4.runtime.CharStreams; @@ -45,14 +49,18 @@ import org.antlr.v4.runtime.TokenSource; import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.misc.ParseCancellationException; +import org.antlr.v4.runtime.tree.TerminalNode; import org.apache.commons.collections.CollectionUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.lang.reflect.Method; import java.util.BitSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.function.Function; import javax.annotation.Nullable; @@ -66,6 +74,9 @@ public class NereidsParser { private static final BitSet 
EXPLAIN_TOKENS = new BitSet(); + private static final Set NON_RESERVED_KEYWORDS; + private static final Map LITERAL_TOKENS; + static { EXPLAIN_TOKENS.set(DorisLexer.EXPLAIN); EXPLAIN_TOKENS.set(DorisLexer.PARSED); @@ -77,6 +88,25 @@ public class NereidsParser { EXPLAIN_TOKENS.set(DorisLexer.PLAN); EXPLAIN_TOKENS.set(DorisLexer.PROCESS); + ImmutableSet.Builder nonReserveds = ImmutableSet.builder(); + for (Method declaredMethod : NonReservedContext.class.getDeclaredMethods()) { + if (TerminalNode.class.equals(declaredMethod.getReturnType()) + && declaredMethod.getName().toUpperCase().equals(declaredMethod.getName()) + && declaredMethod.getParameterTypes().length == 0) { + String nonReserved = declaredMethod.getName(); + nonReserveds.add(nonReserved); + } + } + NON_RESERVED_KEYWORDS = nonReserveds.build(); + + ImmutableMap.Builder literalToTokenType = ImmutableMap.builder(); + for (int tokenType = 0; tokenType <= DorisLexer.VOCABULARY.getMaxTokenType(); tokenType++) { + String literalName = DorisLexer.VOCABULARY.getLiteralName(tokenType); + if (literalName != null) { + literalToTokenType.put(literalName.substring(1, literalName.length() - 1), tokenType); + } + } + LITERAL_TOKENS = literalToTokenType.build(); } /** @@ -256,9 +286,33 @@ public List> parseMultiple(String sql, } public Expression parseExpression(String expression) { + if (isSimpleIdentifier(expression)) { + return new UnboundSlot(expression); + } return parse(expression, DorisParser::expression); } + private static boolean isSimpleIdentifier(String expression) { + if (expression == null || expression.isEmpty()) { + return false; + } + + boolean hasLetter = false; + for (int i = 0; i < expression.length(); i++) { + char c = expression.charAt(i); + if ((('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_' || c == '$')) { + hasLetter = true; + } else if (!('0' <= c && c <= '9')) { + return false; + } + } + if (!hasLetter) { + return false; + } + String upperCase = expression.toUpperCase(); + return 
(NON_RESERVED_KEYWORDS.contains(upperCase) || !LITERAL_TOKENS.containsKey(upperCase)); + } + public DataType parseDataType(String dataType) { return parse(dataType, DorisParser::dataType); } @@ -273,37 +327,40 @@ private T parse(String sql, Function parseFu private T parse(String sql, @Nullable LogicalPlanBuilder logicalPlanBuilder, Function parseFunction) { - ParserRuleContext tree = toAst(sql, parseFunction); + CommonTokenStream tokenStream = parseAllTokens(sql); + ParserRuleContext tree = toAst(tokenStream, parseFunction); LogicalPlanBuilder realLogicalPlanBuilder = logicalPlanBuilder == null - ? new LogicalPlanBuilder(getHintMap(sql, DorisParser::selectHint)) : logicalPlanBuilder; + ? new LogicalPlanBuilder(getHintMap(sql, tokenStream, DorisParser::selectHint)) + : logicalPlanBuilder; return (T) realLogicalPlanBuilder.visit(tree); } public LogicalPlan parseForCreateView(String sql) { - ParserRuleContext tree = toAst(sql, DorisParser::singleStatement); + CommonTokenStream tokenStream = parseAllTokens(sql); + ParserRuleContext tree = toAst(tokenStream, DorisParser::singleStatement); LogicalPlanBuilder realLogicalPlanBuilder = new LogicalPlanBuilderForCreateView( - getHintMap(sql, DorisParser::selectHint)); + getHintMap(sql, tokenStream, DorisParser::selectHint)); return (LogicalPlan) realLogicalPlanBuilder.visit(tree); } + /** parseForSyncMv */ public Optional parseForSyncMv(String sql) { - ParserRuleContext tree = toAst(sql, DorisParser::singleStatement); + CommonTokenStream tokenStream = parseAllTokens(sql); + ParserRuleContext tree = toAst(tokenStream, DorisParser::singleStatement); LogicalPlanBuilderForSyncMv logicalPlanBuilderForSyncMv = new LogicalPlanBuilderForSyncMv( - getHintMap(sql, DorisParser::selectHint)); + getHintMap(sql, tokenStream, DorisParser::selectHint)); logicalPlanBuilderForSyncMv.visit(tree); return logicalPlanBuilderForSyncMv.getQuerySql(); } /** get hint map */ - public static Map getHintMap(String sql, + public static Map 
getHintMap(String sql, CommonTokenStream hintTokenStream, Function parseFunction) { // parse hint first round - DorisLexer hintLexer = new DorisLexer(new CaseInsensitiveStream(CharStreams.fromString(sql))); - CommonTokenStream hintTokenStream = new CommonTokenStream(hintLexer); - Map selectHintMap = Maps.newHashMap(); - Token hintToken = hintTokenStream.getTokenSource().nextToken(); + Iterator tokenIterator = hintTokenStream.getTokens().iterator(); + Token hintToken = tokenIterator.hasNext() ? tokenIterator.next() : null; while (hintToken != null && hintToken.getType() != DorisLexer.EOF) { if (hintToken.getChannel() == 2 && sql.charAt(hintToken.getStartIndex() + 2) == '+') { String hintSql = sql.substring(hintToken.getStartIndex() + 3, hintToken.getStopIndex() + 1); @@ -313,15 +370,19 @@ public static Map getHintMap(String sql, ParserRuleContext hintContext = parseFunction.apply(hintParser); selectHintMap.put(hintToken.getStartIndex(), hintContext); } - hintToken = hintTokenStream.getTokenSource().nextToken(); + hintToken = tokenIterator.hasNext() ? tokenIterator.next() : null; } return selectHintMap; } + public static ParserRuleContext toAst( + String sql, Function parseFunction) { + return toAst(parseAllTokens(sql), parseFunction); + } + /** toAst */ - public static ParserRuleContext toAst(String sql, Function parseFunction) { - DorisLexer lexer = new DorisLexer(new CaseInsensitiveStream(CharStreams.fromString(sql))); - CommonTokenStream tokenStream = new CommonTokenStream(lexer); + public static ParserRuleContext toAst( + CommonTokenStream tokenStream, Function parseFunction) { DorisParser parser = new DorisParser(tokenStream); parser.addParseListener(POST_PROCESSOR); @@ -352,9 +413,7 @@ public static ParserRuleContext toAst(String sql, Function> aggregate() { default PatternDescriptor> aggregate(PatternDescriptor child) { return new PatternDescriptor(new TypePattern(Aggregate.class, child.pattern), defaultPromise()); } + + /** + * create a aggregate pattern. 
+ */ + default PatternDescriptor inlineTable() { + return new PatternDescriptor(new TypePattern(InlineTable.class), defaultPromise()); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java index 4cf3c75b68dc43..86d0495b851bd2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java @@ -62,6 +62,7 @@ public enum RuleType { BINDING_UNBOUND_TVF_RELATION_FUNCTION(RuleTypeClass.REWRITE), BINDING_SET_OPERATION_SLOT(RuleTypeClass.REWRITE), BINDING_INLINE_TABLE_SLOT(RuleTypeClass.REWRITE), + LOGICAL_INLINE_TABLE_TO_LOGICAL_UNION_OR_ONE_ROW_RELATION(RuleTypeClass.REWRITE), COUNT_LITERAL_REWRITE(RuleTypeClass.REWRITE), SUM_LITERAL_REWRITE(RuleTypeClass.REWRITE), @@ -496,8 +497,7 @@ public enum RuleType { IMPLEMENTATION_SENTINEL(RuleTypeClass.IMPLEMENTATION), // sentinel, use to count rules - SENTINEL(RuleTypeClass.SENTINEL), - ; + SENTINEL(RuleTypeClass.SENTINEL); private final RuleTypeClass ruleTypeClass; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java index 1e481542baec12..c308a1e7e796e8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java @@ -66,6 +66,7 @@ import org.apache.doris.nereids.trees.plans.JoinType; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.algebra.Aggregate; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.algebra.SetOperation; import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate; @@ 
-74,7 +75,6 @@ import org.apache.doris.nereids.trees.plans.logical.LogicalFilter; import org.apache.doris.nereids.trees.plans.logical.LogicalGenerate; import org.apache.doris.nereids.trees.plans.logical.LogicalHaving; -import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalIntersect; import org.apache.doris.nereids.trees.plans.logical.LogicalJoin; import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; @@ -195,7 +195,7 @@ protected boolean condition(Rule rule, Plan plan) { logicalQualify(logicalHaving()).thenApply(this::bindQualifyHaving) ), RuleType.BINDING_INLINE_TABLE_SLOT.build( - logicalInlineTable().thenApply(this::bindInlineTable) + inlineTable().thenApply(this::bindInlineTable) ), RuleType.BINDING_ONE_ROW_RELATION_SLOT.build( // we should bind UnboundAlias in the UnboundOneRowRelation @@ -349,24 +349,24 @@ private LogicalOneRowRelation bindOneRowRelation(MatchingContext ctx) { - LogicalInlineTable logicalInlineTable = ctx.root; + private LogicalPlan bindInlineTable(MatchingContext ctx) { + InlineTable inlineTable = ctx.root; // ensure all expressions are valid. 
+ List> constantExprsList = inlineTable.getConstantExprsList(); List relations - = Lists.newArrayListWithCapacity(logicalInlineTable.getConstantExprsList().size()); - for (int i = 0; i < logicalInlineTable.getConstantExprsList().size(); i++) { - for (NamedExpression constantExpr : logicalInlineTable.getConstantExprsList().get(i)) { + = Lists.newArrayListWithCapacity(constantExprsList.size()); + for (int i = 0; i < constantExprsList.size(); i++) { + List row = constantExprsList.get(i); + for (NamedExpression constantExpr : row) { if (constantExpr instanceof DefaultValueSlot) { throw new AnalysisException("Default expression" + " can't exist in SELECT statement at row " + (i + 1)); } } - relations.add(new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), - logicalInlineTable.getConstantExprsList().get(i))); + relations.add(new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), row)); } // construct union all tree - return LogicalPlanBuilder.reduceToLogicalPlanTree(0, relations.size() - 1, - relations, Qualifier.ALL); + return LogicalPlanBuilder.reduceToLogicalPlanTree(0, relations.size() - 1, relations, Qualifier.ALL); } private LogicalHaving bindHaving(MatchingContext> ctx) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java index 9b4ff631a838b6..f08148d73e7fb6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java @@ -80,6 +80,7 @@ import org.apache.doris.nereids.util.ExpressionUtils; import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.nereids.util.TypeCoercionUtils; +import org.apache.doris.nereids.util.Utils; import org.apache.doris.qe.ConnectContext; import com.google.common.base.Preconditions; @@ -253,7 +254,7 @@ private Plan bindOlapTableSink(MatchingContext> ctx) { 
private LogicalProject getOutputProjectByCoercion(List tableSchema, LogicalPlan child, Map columnToOutput) { - List fullOutputExprs = ImmutableList.copyOf(columnToOutput.values()); + List fullOutputExprs = Utils.fastToImmutableList(columnToOutput.values()); if (child instanceof LogicalOneRowRelation) { // remove default value slot in one row relation child = ((LogicalOneRowRelation) child).withProjects(((LogicalOneRowRelation) child) @@ -274,6 +275,7 @@ private LogicalProject getOutputProjectByCoercion(List tableSchema, L // we skip it. continue; } + expr = expr.toSlot(); DataType inputType = expr.getDataType(); DataType targetType = DataType.fromCatalogType(tableSchema.get(i).getType()); Expression castExpr = expr; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java index 9c6e3adbe74e1b..01adc549e3686d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java @@ -29,6 +29,7 @@ import org.apache.doris.nereids.analyzer.UnboundRelation; import org.apache.doris.nereids.analyzer.UnboundResultSink; import org.apache.doris.nereids.analyzer.UnboundTableSink; +import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.parser.NereidsParser; import org.apache.doris.nereids.pattern.MatchingContext; import org.apache.doris.nereids.properties.PhysicalProperties; @@ -197,7 +198,13 @@ private void collectMTMVCandidates(TableIf table, CascadesContext cascadesContex try { for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTables()) { LOG.info("mtmv {} related base table include {}", new BaseTableInfo(mtmv), baseTableInfo); - cascadesContext.getStatementContext().getAndCacheTable(baseTableInfo.toList(), TableFrom.MTMV); + try { + 
cascadesContext.getStatementContext().getAndCacheTable(baseTableInfo.toList(), + TableFrom.MTMV); + } catch (AnalysisException exception) { + LOG.warn("mtmv related base table get err, related table is " + + baseTableInfo.toList(), exception); + } } } finally { mtmv.readMvUnlock(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java index e5b74ee26bcb02..0fcc58e0273d2f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java @@ -43,6 +43,7 @@ import org.apache.doris.nereids.util.ExpressionUtils; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableSet; import java.util.Collection; @@ -85,7 +86,8 @@ public List buildRules() { new OlapTableSinkExpressionRewrite().build()); } - private class GenerateExpressionRewrite extends OneRewriteRuleFactory { + /** GenerateExpressionRewrite */ + public class GenerateExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalGenerate().thenApply(ctx -> { @@ -103,7 +105,8 @@ public Rule build() { } } - private class OneRowRelationExpressionRewrite extends OneRewriteRuleFactory { + /** OneRowRelationExpressionRewrite */ + public class OneRowRelationExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalOneRowRelation().thenApply(ctx -> { @@ -111,19 +114,25 @@ public Rule build() { List projects = oneRowRelation.getProjects(); ExpressionRewriteContext context = new ExpressionRewriteContext(ctx.cascadesContext); - List newProjects = projects - .stream() - .map(expr -> (NamedExpression) rewriter.rewrite(expr, context)) - .collect(ImmutableList.toImmutableList()); - if 
(projects.equals(newProjects)) { - return oneRowRelation; + Builder rewrittenExprs + = ImmutableList.builderWithExpectedSize(projects.size()); + boolean changed = false; + for (NamedExpression project : projects) { + NamedExpression newProject = (NamedExpression) rewriter.rewrite(project, context); + if (!changed && !project.deepEquals(newProject)) { + changed = true; + } + rewrittenExprs.add(newProject); } - return new LogicalOneRowRelation(oneRowRelation.getRelationId(), newProjects); + return changed + ? new LogicalOneRowRelation(oneRowRelation.getRelationId(), rewrittenExprs.build()) + : oneRowRelation; }).toRule(RuleType.REWRITE_ONE_ROW_RELATION_EXPRESSION); } } - private class ProjectExpressionRewrite extends OneRewriteRuleFactory { + /** ProjectExpressionRewrite */ + public class ProjectExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalProject().thenApply(ctx -> { @@ -139,7 +148,8 @@ public Rule build() { } } - private class FilterExpressionRewrite extends OneRewriteRuleFactory { + /** FilterExpressionRewrite */ + public class FilterExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalFilter().thenApply(ctx -> { @@ -155,7 +165,8 @@ public Rule build() { } } - private class OlapTableSinkExpressionRewrite extends OneRewriteRuleFactory { + /** OlapTableSinkExpressionRewrite */ + public class OlapTableSinkExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalOlapTableSink().thenApply(ctx -> { @@ -177,7 +188,8 @@ public Rule build() { } } - private class AggExpressionRewrite extends OneRewriteRuleFactory { + /** AggExpressionRewrite */ + public class AggExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalAggregate().thenApply(ctx -> { @@ -197,7 +209,8 @@ public Rule build() { } } - private class JoinExpressionRewrite extends OneRewriteRuleFactory { + /** JoinExpressionRewrite */ + public 
class JoinExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalJoin().thenApply(ctx -> { @@ -244,7 +257,8 @@ private Pair> rewriteConjuncts(List conjun } } - private class SortExpressionRewrite extends OneRewriteRuleFactory { + /** SortExpressionRewrite */ + public class SortExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { @@ -265,7 +279,8 @@ public Rule build() { } } - private class HavingExpressionRewrite extends OneRewriteRuleFactory { + /** HavingExpressionRewrite */ + public class HavingExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalHaving().thenApply(ctx -> { @@ -281,7 +296,8 @@ public Rule build() { } } - private class LogicalRepeatRewrite extends OneRewriteRuleFactory { + /** LogicalRepeatRewrite */ + public class LogicalRepeatRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalRepeat().thenApply(ctx -> { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java index 6aa4529ddd4ab6..6d5a70139ab19c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java @@ -47,7 +47,8 @@ public List> buildRules() { ); } - private static Expression convert(Cast cast) { + /** convert */ + public static Expression convert(Cast cast) { Expression child = cast.child(); DataType originalType = child.getDataType(); DataType targetType = cast.getDataType(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java index d1c385ec621062..c439458ff4c96e 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java @@ -24,7 +24,9 @@ import org.apache.doris.common.util.DebugUtil; import org.apache.doris.datasource.InternalCatalog; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.analyzer.UnboundVariable; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.rules.analysis.ExpressionAnalyzer; import org.apache.doris.nereids.rules.expression.AbstractExpressionRewriteRule; import org.apache.doris.nereids.rules.expression.ExpressionListenerMatcher; import org.apache.doris.nereids.rules.expression.ExpressionMatchingContext; @@ -53,6 +55,7 @@ import org.apache.doris.nereids.trees.expressions.Or; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.TimestampArithmetic; +import org.apache.doris.nereids.trees.expressions.Variable; import org.apache.doris.nereids.trees.expressions.WhenClause; import org.apache.doris.nereids.trees.expressions.functions.BoundFunction; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; @@ -220,6 +223,12 @@ public Expression visitMatch(Match match, ExpressionRewriteContext context) { return super.visitMatch(match, context); } + @Override + public Expression visitUnboundVariable(UnboundVariable unboundVariable, ExpressionRewriteContext context) { + Variable variable = ExpressionAnalyzer.resolveUnboundVariable(unboundVariable); + return variable.getRealExpression(); + } + @Override public Expression visitEncryptKeyRef(EncryptKeyRef encryptKeyRef, ExpressionRewriteContext context) { String dbName = encryptKeyRef.getDbName(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java index 7e91d5502f7fad..eb9fd6e149160b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java @@ -48,6 +48,7 @@ import org.apache.doris.nereids.trees.expressions.literal.BooleanLiteral; import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.literal.MaxLiteral; +import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BooleanType; import org.apache.doris.nereids.util.ExpressionUtils; @@ -807,22 +808,29 @@ private EvaluateRangeResult computeMonotonicFunctionRange(EvaluateRangeResult re : new NonNullable(funcChild)); partitionSlotContainsNull.put((Expression) func, withNullable.nullable()); - if (!result.childrenResult.get(0).columnRanges.containsKey(funcChild)) { + if (!result.childrenResult.get(childIndex).columnRanges.containsKey(funcChild)) { return result; } - ColumnRange childRange = result.childrenResult.get(0).columnRanges.get(funcChild); + ColumnRange childRange = result.childrenResult.get(childIndex).columnRanges.get(funcChild); if (childRange.isEmptyRange() || childRange.asRanges().size() != 1 || (!childRange.span().hasLowerBound() && !childRange.span().hasUpperBound())) { return result; } Range span = childRange.span(); + // null means positive infinity or negative infinity Literal lower = span.hasLowerBound() ? span.lowerEndpoint().getValue() : null; Literal upper = span.hasUpperBound() && !(span.upperEndpoint().getValue() instanceof MaxLiteral) ? span.upperEndpoint().getValue() : null; + if (!func.isMonotonic(lower, upper)) { + return result; + } Expression lowerValue = lower != null ? 
FoldConstantRuleOnFE.evaluate(func.withConstantArgs(lower), expressionRewriteContext) : null; Expression upperValue = upper != null ? FoldConstantRuleOnFE.evaluate(func.withConstantArgs(upper), expressionRewriteContext) : null; + if (lowerValue instanceof NullLiteral || upperValue instanceof NullLiteral) { + return result; + } if (!func.isPositive()) { Expression temp = lowerValue; lowerValue = upperValue; @@ -842,6 +850,9 @@ private EvaluateRangeResult computeMonotonicFunctionRange(EvaluateRangeResult re if (upperValue instanceof Literal) { newRange = newRange.withUpperBound((Literal) upperValue); } + if (newRange.isEmptyRange() || !newRange.span().hasLowerBound() && !newRange.span().hasUpperBound()) { + return result; + } context.rangeMap.put((Expression) func, newRange); newRanges.put((Expression) func, newRange); return new EvaluateRangeResult((Expression) func, newRanges, result.childrenResult); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java index fbe0d44417363a..7dc9ddcb3971ef 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java @@ -182,7 +182,7 @@ private static Expression processDateLikeTypeCoercion(ComparisonPredicate cp, Ex DateTimeLiteral dateTimeLiteral = (DateTimeLiteral) right; right = migrateToDateV2(dateTimeLiteral); if (dateTimeLiteral.getHour() != 0 || dateTimeLiteral.getMinute() != 0 - || dateTimeLiteral.getSecond() != 0) { + || dateTimeLiteral.getSecond() != 0 || dateTimeLiteral.getMicroSecond() != 0) { if (cp instanceof EqualTo) { return ExpressionUtils.falseOrNull(cast.child()); } else if (cp instanceof NullSafeEqual) { diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/LogicalResultSinkToShortCircuitPointQuery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/LogicalResultSinkToShortCircuitPointQuery.java similarity index 100% rename from fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/LogicalResultSinkToShortCircuitPointQuery.java rename to fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/LogicalResultSinkToShortCircuitPointQuery.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java index 357883d1f7136a..3c31ce22e4611a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java @@ -219,8 +219,9 @@ public static String parseMvColumnToMvName(String mvName, Optional aggTy } protected static boolean containsAllColumn(Expression expression, Set mvColumnNames) { - if (mvColumnNames.contains(expression.toSql()) || mvColumnNames - .contains(org.apache.doris.analysis.CreateMaterializedViewStmt.mvColumnBreaker(expression.toSql()))) { + String sql = expression.toSql(); + if (mvColumnNames.contains(sql) || mvColumnNames + .contains(org.apache.doris.analysis.CreateMaterializedViewStmt.mvColumnBreaker(sql))) { return true; } if (expression.children().isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java index 468b9cf659c5b2..fa03165b37c337 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java @@ -625,9 +625,13 @@ private SelectResult select(LogicalOlapScan scan, Set requiredScanOutput, aggFuncsDiff(aggregateFunctions, aggRewriteResult), groupingExprs).isOn()) .collect(Collectors.toSet()); + Set candidatesWithRewritingIndexes = candidatesWithRewriting.stream() + .map(result -> result.index) + .collect(Collectors.toSet()); + Set candidatesWithoutRewriting = indexesGroupByIsBaseOrNot .getOrDefault(false, ImmutableList.of()).stream() - .filter(index -> !candidatesWithRewriting.contains(index)) + .filter(index -> !candidatesWithRewritingIndexes.contains(index)) .filter(index -> preAggEnabledByHint(scan) || checkPreAggStatus(scan, index.getId(), predicates, aggregateFunctions, groupingExprs).isOn()) .collect(Collectors.toSet()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java index 7d1b5439bace23..825bb6f7180d16 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java @@ -101,6 +101,7 @@ import com.google.common.base.Preconditions; import org.apache.commons.collections.CollectionUtils; +import java.time.DateTimeException; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; @@ -212,7 +213,7 @@ private ColumnStatistic castMinMax(ColumnStatistic colStats, DataType targetType long min = dateMinLiteral.getValue(); builder.setMinValue(min); builder.setMinExpr(dateMinLiteral.toLegacyLiteral()); - } catch (AnalysisException e) { + } catch (AnalysisException | DateTimeException e) { convertSuccess = false; } } @@ -223,7 +224,7 @@ private ColumnStatistic castMinMax(ColumnStatistic colStats, DataType targetType long max = dateMaxLiteral.getValue(); builder.setMaxValue(max); 
builder.setMaxExpr(dateMaxLiteral.toLegacyLiteral()); - } catch (AnalysisException e) { + } catch (AnalysisException | DateTimeException e) { convertSuccess = false; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java index 2e20dd05180a71..86d7eb72382300 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java @@ -100,7 +100,7 @@ public AggregateExpression withChildren(List children) { } @Override - public String toSql() { + public String computeToSql() { if (aggregateParam.aggMode.productAggregateBuffer) { return "partial_" + function.toSql(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java index 9eea3afd879e67..53a82011ac4c3c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java @@ -124,7 +124,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return child().toSql() + " AS `" + name.get() + "`"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java index c54ad358561d8e..edc074af2b513a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java @@ -92,7 +92,7 @@ public DataType getDataType() { } @Override - public String toSql() { + public String 
computeToSql() { return child(0).toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java index 750f3a77881430..f699e7531f6207 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java @@ -49,7 +49,7 @@ public List expectedInputTypes() { } @Override - public String toSql() { + public String computeToSql() { return "(" + left().toSql() + " " + symbol + " " + right().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java index 8b4bffad3fc817..0789d9a65279db 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java @@ -35,7 +35,7 @@ public BoundStar(List children) { ); } - public String toSql() { + public String computeToSql() { return children.stream().map(Expression::toSql).collect(Collectors.joining(", ")); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java index bd48b648a73dfb..0c3687f57153f2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java @@ -111,7 +111,7 @@ public String toString() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { StringBuilder output = new StringBuilder("CASE"); for (Expression child : children()) { if (child instanceof WhenClause) { diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java index 9122f0f4adbb0a..20f8079bd9f141 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java @@ -95,7 +95,7 @@ public Cast withChildren(List children) { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { return "cast(" + child().toSql() + " as " + targetType.toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java index d58d1ba8193de5..9b1535eb9cc3c9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java @@ -101,7 +101,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); children().forEach(c -> sb.append(c.toSql()).append(",")); sb.deleteCharAt(sb.length() - 1); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java index 3d3bd17c70e12c..8d097d0faa6f45 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java @@ -65,8 +65,8 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { - return "EXISTS (SUBQUERY) " + super.toSql(); + public String computeToSql() { + return "EXISTS (SUBQUERY) " + super.computeToSql(); } @Override diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java index e20290e8b59c41..6634d5e0160ead 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java @@ -20,6 +20,7 @@ import org.apache.doris.nereids.analyzer.Unbound; import org.apache.doris.nereids.analyzer.UnboundVariable; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.exceptions.UnboundException; import org.apache.doris.nereids.trees.AbstractTreeNode; import org.apache.doris.nereids.trees.expressions.ArrayItemReference.ArrayItemSlot; import org.apache.doris.nereids.trees.expressions.functions.ExpressionTrait; @@ -68,6 +69,7 @@ public abstract class Expression extends AbstractTreeNode implements private final Supplier> inputSlots = Suppliers.memoize( () -> collect(e -> e instanceof Slot && !(e instanceof ArrayItemSlot))); private final int fastChildrenHashCode; + private final Supplier toSqlCache = Suppliers.memoize(this::computeToSql); protected Expression(Expression... 
children) { super(children); @@ -210,6 +212,10 @@ public int fastChildrenHashCode() { return fastChildrenHashCode; } + protected String computeToSql() { + throw new UnboundException("sql"); + } + protected TypeCheckResult checkInputDataTypesInternal() { return TypeCheckResult.SUCCESS; } @@ -301,6 +307,10 @@ public boolean isInferred() { return inferred; } + public final String toSql() { + return toSqlCache.get(); + } + @Override public Expression withChildren(List children) { throw new RuntimeException(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java index 53a753c4535dd1..b8c0cf54471901 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java @@ -122,7 +122,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return compareExpr.toSql() + " IN " + options.stream() .map(Expression::toSql).sorted() .collect(Collectors.joining(", ", "(", ")")); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java index 8b7d0518181fda..71dc1f5eb4f08f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java @@ -77,8 +77,8 @@ public boolean nullable() throws UnboundException { } @Override - public String toSql() { - return this.compareExpr.toSql() + " IN (" + super.toSql() + ")"; + public String computeToSql() { + return this.compareExpr.toSql() + " IN (" + super.computeToSql() + ")"; } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java index 7bb8538fc75031..22216a84bafe87 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java @@ -55,7 +55,7 @@ public IsNull withChildren(List children) { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { return child().toSql() + " IS NULL"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java index 214525d2594580..16dade740b9422 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java @@ -48,8 +48,8 @@ public DataType getDataType() { } @Override - public String toSql() { - return " (LISTQUERY) " + super.toSql(); + public String computeToSql() { + return " (LISTQUERY) " + super.computeToSql(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java index d9dcde287d3884..405e3cb8fe4612 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java @@ -76,7 +76,7 @@ public boolean nullable() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return "(" + left().toSql() + " " + symbol + " " + right().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java index 5061cab5ac9631..b001da9118fea3 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java @@ -102,7 +102,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return "( not " + child().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java index d09fe2c0a00ed3..7e33d4315d81ad 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java @@ -81,7 +81,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return orderKey.toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java index c79c2d9db6d0e9..3ce8cdb017f9b8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java @@ -66,7 +66,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return "?"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java index db0c78c1f78f57..d604e919e31d17 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java @@ -56,7 +56,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return 
getMap() .entrySet() .stream() diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java index 178debe7db83a5..25a7052a4acabc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java @@ -84,8 +84,8 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { - return " (SCALARSUBQUERY) " + super.toSql(); + public String computeToSql() { + return " (SCALARSUBQUERY) " + super.computeToSql(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java index e90bc3a5ecfaf4..890fbdfdb96f72 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java @@ -183,7 +183,7 @@ public Optional getTable() { } @Override - public String toSql() { + public String computeToSql() { if (subPath.isEmpty()) { return name.get(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java index 8900ac928590c3..5a62be54f93d81 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java @@ -55,7 +55,7 @@ public List getSignatures() { } @Override - public String toSql() { + public String computeToSql() { return '(' + left().toSql() + ' ' + getName() + ' ' + right().toSql() + ')'; } diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java index 35d0e566476880..c08fda1dc6b713 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java @@ -80,7 +80,7 @@ public boolean nullable() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return "(" + queryPlan + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java index d3e326fa48a574..40a727eb1757ba 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java @@ -129,7 +129,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder strBuilder = new StringBuilder(); if (funcName != null) { // Function-call like version. 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java index ace2c648daec79..61efa91f2621fc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java @@ -46,7 +46,7 @@ public List expectedInputTypes() { } @Override - public String toSql() { + public String computeToSql() { return "(" + symbol + " " + child().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java index fd16b84b183c7b..5944ec08744980 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java @@ -85,7 +85,7 @@ public String toString() throws UnboundException { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { return toString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java index 38f23ee40fafe9..3a16b38f9e19e0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java @@ -50,7 +50,7 @@ public SetType getSetType() { } @Override - public String toSql() { + public String computeToSql() { return toString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java index 43f4853758105c..1b46a8552bafb9 
100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java @@ -82,7 +82,7 @@ public R accept(ExpressionVisitor visitor, C context) { } @Override - public String toSql() { + public String computeToSql() { return getName(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java index 4ce77f22df1692..adb862bb2f1041 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java @@ -56,7 +56,7 @@ public Expression getResult() { } @Override - public String toSql() { + public String computeToSql() { return " WHEN " + left().toSql() + " THEN " + right().toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java index 5bea07fff00326..7f26298c700626 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java @@ -179,7 +179,7 @@ public int hashCode() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append(function.toSql()).append(" OVER("); if (!partitionKeys.isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java index 5cbb93ce3748ea..58ed4f15f9baa0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java @@ -95,7 +95,7 @@ public int hashCode() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append(frameUnits + " "); if (rightBoundary != null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java index 5ccc64a34bb43b..13d4b515ad75f3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java @@ -85,7 +85,7 @@ public int hashCode() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { StringBuilder sql = new StringBuilder(getName()).append("("); int arity = arity(); for (int i = 0; i < arity; i++) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java new file mode 100644 index 00000000000000..7fec22fd9d317d --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java @@ -0,0 +1,38 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions; + +import org.apache.doris.nereids.trees.expressions.literal.Literal; + +/** monotonicity for XX_ADD XX_SUB */ +public interface DateAddSubMonotonic extends Monotonic { + @Override + default boolean isMonotonic(Literal lower, Literal upper) { + return child(1) instanceof Literal; + } + + @Override + default boolean isPositive() { + return true; + } + + @Override + default int getMonotonicFunctionChildIndex() { + return 0; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java new file mode 100644 index 00000000000000..71ad80a347176a --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java @@ -0,0 +1,47 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions; + +import org.apache.doris.nereids.trees.expressions.literal.Literal; + +/** monotonicity of XX_CEIL and XX_FLOOR */ +public interface DateCeilFloorMonotonic extends Monotonic { + @Override + default boolean isMonotonic(Literal lower, Literal upper) { + switch (arity()) { + case 1: + return true; + case 2: + return !(child(0) instanceof Literal) && child(1) instanceof Literal; + case 3: + return !(child(0) instanceof Literal) && child(1) instanceof Literal && child(2) instanceof Literal; + default: + return false; + } + } + + @Override + default boolean isPositive() { + return true; + } + + @Override + default int getMonotonicFunctionChildIndex() { + return 0; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java new file mode 100644 index 00000000000000..daaea895b6d15b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java @@ -0,0 +1,39 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions; + +import org.apache.doris.nereids.trees.expressions.literal.Literal; + +/** monotonicity for XX_DIFF */ +public interface DateDiffMonotonic extends Monotonic { + @Override + default boolean isMonotonic(Literal lower, Literal upper) { + return !(child(0) instanceof Literal) && child(1) instanceof Literal + || child(0) instanceof Literal && !(child(1) instanceof Literal); + } + + @Override + default boolean isPositive() { + return child(1) instanceof Literal; + } + + @Override + default int getMonotonicFunctionChildIndex() { + return child(1) instanceof Literal ? 
0 : 1; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java index bcaa040cb2a650..feec5933890e67 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java @@ -18,9 +18,14 @@ package org.apache.doris.nereids.trees.expressions.functions; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.literal.Literal; /** monotonicity of expressions */ public interface Monotonic extends ExpressionTrait { + default boolean isMonotonic(Literal lower, Literal upper) { + return true; + } + // true means that the function is an increasing function boolean isPositive(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java index 90df2f531da3fb..777c9c4cc7add5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java @@ -107,7 +107,7 @@ public boolean hasVarArguments() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { StringBuilder sql = new StringBuilder(getName()).append("("); if (distinct) { sql.append("DISTINCT "); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java index e86e90974da1bd..21e6ee1cba6b21 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java @@ -119,11 +119,11 @@ public Count withDistinctAndChildren(boolean distinct, List children } @Override - public String toSql() { + public String computeToSql() { if (isStar) { return "count(*)"; } - return super.toSql(); + return super.computeToSql(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java index 151f7ffc7732be..1e4a866ecdcc53 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java @@ -42,7 +42,7 @@ public CryptoFunction(String name, List arguments) { } @Override - public String toSql() { + public String computeToSql() { List args = Lists.newArrayList(); for (int i = 0; i < arity(); i++) { if (i == 1) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java index e77c307b523869..740363b50aad2b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'day_ceil'. This class is generated by GenerateFunction. */ public class DayCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDayCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new DayCeil(literal); + case 2: + return new DayCeil(literal, child(1)); + case 3: + return new DayCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java index b7e04e3a374629..5ba7fc13c7526b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'day_floor'. This class is generated by GenerateFunction. */ public class DayFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDayFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new DayFloor(literal); + case 2: + return new DayFloor(literal, child(1)); + case 3: + return new DayFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java index e02c20eee82a04..a231816a330eff 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class DaysAdd extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() private static final List SIGNATURES = Config.enable_date_conversion ? ImmutableList.of( @@ -77,4 +78,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDaysAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new DaysAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java index e0343f1148f162..c6f3377fdc99c5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import 
org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'days_diff'. This class is generated by GenerateFunction. */ public class DaysDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDaysDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new DaysDiff(literal, child(1)); + } else { + return new DaysDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java index 8d135dc6c9cf72..5dab58ecdf63c5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import 
org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class DaysSub extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() private static final List SIGNATURES = Config.enable_date_conversion ? ImmutableList.of( @@ -77,4 +78,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDaysSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new DaysSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java index c3d19588ce5f2f..1799c1461fc1b4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java @@ -21,6 +21,7 @@ import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; +import org.apache.doris.nereids.trees.expressions.functions.Monotonic; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.shape.UnaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'from_days'. This class is generated by GenerateFunction. 
*/ public class FromDays extends ScalarFunction - implements UnaryExpression, ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements UnaryExpression, ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, Monotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateV2Type.INSTANCE).args(IntegerType.INSTANCE) @@ -67,4 +68,19 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitFromDays(this, context); } + + @Override + public boolean isPositive() { + return true; + } + + @Override + public int getMonotonicFunctionChildIndex() { + return 0; + } + + @Override + public Expression withConstantArgs(Expression literal) { + return new FromDays(literal); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java index e76151ef9d682c..13358e18d74770 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'hour_ceil'. This class is generated by GenerateFunction. 
*/ public class HourCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -100,4 +101,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHourCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new HourCeil(literal); + case 2: + return new HourCeil(literal, child(1)); + case 3: + return new HourCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java index 567f0a2dd188da..f48e26ea443f25 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'hour_floor'. 
This class is generated by GenerateFunction. */ public class HourFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -100,4 +101,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHourFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new HourFloor(literal); + case 2: + return new HourFloor(literal, child(1)); + case 3: + return new HourFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java index cb4e601b14d001..4c10b204597975 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'days_add'. 
*/ public class HoursAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHoursAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new HoursAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java index 452e3110ff764d..63942cd56e9e5b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'hours_diff'. This class is generated by GenerateFunction. 
*/ public class HoursDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHoursDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new HoursDiff(literal, child(1)); + } else { + return new HoursDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java index e53f8ecb90528a..49e8e5cb50f69a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'hours_sub'. 
*/ public class HoursSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHoursSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new HoursSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java index e8261f6391dda9..2ecab6090d8d3f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java @@ -126,7 +126,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { StringBuilder builder = new StringBuilder(); String argStr = argumentNames.get(0); if (argumentNames.size() > 1) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java index 8d792259440dd2..33dd5809f2b1a8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import 
org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MicroSeconds_add'. */ public class MicroSecondsAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMicroSecondsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MicroSecondsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java index 8bf3a9648396d7..160e8a96b13e68 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import 
org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -35,7 +37,7 @@ * ScalarFunction 'microseconds_diff'. This class is generated by GenerateFunction. */ public class MicroSecondsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -67,4 +69,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMicroSecondsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MicroSecondsDiff(literal, child(1)); + } else { + return new MicroSecondsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java index 2894d1fffc902f..20c880fb879298 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import 
org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MicroSeconds_sub'. */ public class MicroSecondsSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMicroSecondsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MicroSecondsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java index 1cb56b13f84ed5..244b661db3afed 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MilliSeconds_add'. 
*/ public class MilliSecondsAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMilliSecondsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MilliSecondsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java index 4500bd69460d98..0e8c623ce176b2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -35,7 +37,7 @@ * ScalarFunction 'milliseconds_diff'. This class is generated by GenerateFunction. 
*/ public class MilliSecondsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -67,4 +69,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMilliSecondsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MilliSecondsDiff(literal, child(1)); + } else { + return new MilliSecondsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java index 42891b7e7e0b22..10b4f8184d12d8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MilliSeconds_sub'. 
*/ public class MilliSecondsSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMilliSecondsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MilliSecondsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java index b00eaff07c2c62..4f3e317d00ef39 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -35,7 +36,7 @@ * ScalarFunction 'minute_ceil'. This class is generated by GenerateFunction. 
*/ public class MinuteCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -99,4 +100,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinuteCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MinuteCeil(literal); + case 2: + return new MinuteCeil(literal, child(1)); + case 3: + return new MinuteCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java index 683acc3a48381c..cefb5222c764ad 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -35,7 +36,7 @@ * ScalarFunction 'minute_floor'. This class is generated by GenerateFunction. 
*/ public class MinuteFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -99,4 +100,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinuteFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MinuteFloor(literal); + case 2: + return new MinuteFloor(literal, child(1)); + case 3: + return new MinuteFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java index f4c02fb84ca3d6..8ba1642f6248b6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'minutes_add'. 
*/ public class MinutesAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinutesAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MinutesAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java index 4d011116334bf4..91c254be7c14dc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'minutes_diff'. This class is generated by GenerateFunction. 
*/ public class MinutesDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinutesDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MinutesDiff(literal, child(1)); + } else { + return new MinutesDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java index 4fb616957813a7..2a29d9e1659963 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'minutes_sub'. 
*/ public class MinutesSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinutesSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MinutesSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java index 627568cf28a145..a1f4628dd6a9ab 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'month_ceil'. This class is generated by GenerateFunction. 
*/ public class MonthCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MonthCeil(literal); + case 2: + return new MonthCeil(literal, child(1)); + case 3: + return new MonthCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java index f3b0b66c1396d1..d55d52ab68ce5a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'month_floor'. 
This class is generated by GenerateFunction. */ public class MonthFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MonthFloor(literal); + case 2: + return new MonthFloor(literal, child(1)); + case 3: + return new MonthFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java index 5126400b71efaa..1cca6d8446fef6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ 
*/ public class MonthsAdd extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MonthsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java index 373265b1e3822a..a850767a49fa40 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'months_diff'. This class is generated by GenerateFunction. 
*/ public class MonthsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE).args(DateV2Type.INSTANCE, DateV2Type.INSTANCE), @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MonthsDiff(literal, child(1)); + } else { + return new MonthsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java index 1c2985a6e136e2..9c5824a1b9eebb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class MonthsSub extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, 
PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MonthsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java index 04cd08f4c8ce7b..3b0e657698743a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'second_ceil'. This class is generated by GenerateFunction. 
*/ public class SecondCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -100,4 +101,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new SecondCeil(literal); + case 2: + return new SecondCeil(literal, child(1)); + case 3: + return new SecondCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java index ae0af1106073ac..c06fff06aed016 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'second_floor'. 
This class is generated by GenerateFunction. */ public class SecondFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -101,4 +102,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new SecondFloor(literal); + case 2: + return new SecondFloor(literal, child(1)); + case 3: + return new SecondFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java index a6e131f5263537..3afa8f134193ec 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'minutes_add'. 
*/ public class SecondsAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new SecondsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java index 4dd7e12b9e2f32..c81999d4fa2c98 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'seconds_diff'. This class is generated by GenerateFunction. 
*/ public class SecondsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new SecondsDiff(literal, child(1)); + } else { + return new SecondsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java index 37c59b2168bda2..d3093f84e1a2dc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'Seconds_sub'. 
*/ public class SecondsSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new SecondsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java index 633e1e7d4f3bda..178187ad9cbc65 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java @@ -20,6 +20,9 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; +import org.apache.doris.nereids.trees.expressions.functions.Monotonic; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeLiteral; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.types.DateTimeType; @@ -39,7 +42,8 @@ /** * ScalarFunction 'unix_timestamp'. This class is generated by GenerateFunction. 
*/ -public class UnixTimestamp extends ScalarFunction implements ExplicitlyCastableSignature { +public class UnixTimestamp extends ScalarFunction implements ExplicitlyCastableSignature, Monotonic { + private static final DateTimeLiteral MAX = new DateTimeLiteral("2038-01-19 03:14:07"); // we got changes when computeSignature private static final List SIGNATURES = ImmutableList.of( @@ -145,4 +149,37 @@ public R accept(ExpressionVisitor visitor, C context) { public boolean isDeterministic() { return !this.children.isEmpty(); } + + @Override + public boolean isPositive() { + return true; + } + + @Override + public int getMonotonicFunctionChildIndex() { + return 0; + } + + @Override + public Expression withConstantArgs(Expression literal) { + return new UnixTimestamp(literal); + } + + @Override + public boolean isMonotonic(Literal lower, Literal upper) { + if (arity() != 1) { + return false; + } + if (null == lower) { + lower = DateTimeLiteral.MIN_DATETIME; + } + if (null == upper) { + upper = DateTimeLiteral.MAX_DATETIME; + } + if (lower.compareTo(MAX) <= 0 && upper.compareTo(MAX) > 0) { + return false; + } else { + return true; + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java index bc294638be99cd..37c952af815d86 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -36,7 +37,7 @@ * ScalarFunction 'year_ceil'. This class is generated by GenerateFunction. */ public class YearCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -105,4 +106,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new YearCeil(literal); + case 2: + return new YearCeil(literal, child(1)); + case 3: + return new YearCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java index 5415502a769579..00a1ad918f7ffa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -36,7 +37,7 @@ * ScalarFunction 'year_floor'. This class is generated by GenerateFunction. */ public class YearFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -105,4 +106,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new YearFloor(literal); + case 2: + return new YearFloor(literal, child(1)); + case 3: + return new YearFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java index 33c9e1c6dfa5e8..9b81378d9871bc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class YearsAdd extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new YearsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java index e217d8da72902a..61b637449f1797 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import 
org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'years_diff'. This class is generated by GenerateFunction. */ public class YearsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE).args(DateV2Type.INSTANCE, DateV2Type.INSTANCE), @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new YearsDiff(literal, child(1)); + } else { + return new YearsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java index b70444178df508..6f46727d937a28 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import 
org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class YearsSub extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new YearsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java index 837edf27ab1067..4a4257e67609d0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java @@ -130,7 +130,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { String args = getTVFProperties() .getMap() .entrySet() diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java index 486eeddabd71c8..be84a5b32e35cf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java @@ -94,7 +94,7 @@ public String toString() { } @Override - 
public String toSql() { + public String computeToSql() { String items = this.items.stream() .map(Literal::toSql) .collect(Collectors.joining(", ")); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java index 6ea1d2af725679..eb8269d68fd0a8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java @@ -30,6 +30,7 @@ import com.google.common.collect.ImmutableSet; +import java.time.DateTimeException; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.Year; @@ -269,8 +270,8 @@ static Result normalize(String s) { } /** parseDateLiteral */ - public static Result parseDateLiteral(String s) { - Result parseResult = parseDateTime(s); + public static Result parseDateLiteral(String s) { + Result parseResult = parseDateTime(s); if (parseResult.isError()) { return parseResult.cast(); } @@ -286,17 +287,24 @@ public static Result parseDateLiteral(String s) } /** parseDateTime */ - public static Result parseDateTime(String s) { - // fast parse '2022-01-01' - if (s.length() == 10 && s.charAt(4) == '-' && s.charAt(7) == '-') { - TemporalAccessor date = fastParseDate(s); - if (date != null) { - return Result.ok(date); - } - } - + public static Result parseDateTime(String s) { String originalString = s; try { + // fast parse '2022-01-01' + if ((s.length() == 10 || s.length() == 19) && s.charAt(4) == '-' && s.charAt(7) == '-') { + if (s.length() == 10) { + TemporalAccessor date = fastParseDate(s); + if (date != null) { + return Result.ok(date); + } + } else if (s.charAt(10) == ' ' && s.charAt(13) == ':' && s.charAt(16) == ':') { + TemporalAccessor date = fastParseDateTime(s); + if (date != null) { + return Result.ok(date); + } + } + } + TemporalAccessor dateTime; // remove 
suffix/prefix ' ' @@ -342,6 +350,10 @@ public static Result parseDateTime(String s } return Result.ok(dateTime); + } catch (DateTimeException e) { + return Result.err(() -> + new DateTimeException("date/datetime literal [" + originalString + "] is invalid", e) + ); } catch (Exception ex) { return Result.err(() -> new AnalysisException("date/datetime literal [" + originalString + "] is invalid")); } @@ -423,7 +435,7 @@ public String getStringValue() { } @Override - public String toSql() { + public String computeToSql() { return "'" + getStringValue() + "'"; } @@ -566,6 +578,21 @@ private static TemporalAccessor fastParseDate(String date) { } } + private static TemporalAccessor fastParseDateTime(String date) { + Integer year = readNextInt(date, 0, 4); + Integer month = readNextInt(date, 5, 2); + Integer day = readNextInt(date, 8, 2); + Integer hour = readNextInt(date, 11, 2); + Integer minute = readNextInt(date, 14, 2); + Integer second = readNextInt(date, 17, 2); + + if (year != null && month != null && day != null && hour != null && minute != null && second != null) { + return LocalDateTime.of(year, month, day, hour, minute, second); + } else { + return null; + } + } + private static Integer readNextInt(String str, int offset, int readLength) { int value = 0; int realReadLength = 0; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java index 27470187eae0d2..7912142f97feb9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java @@ -43,11 +43,10 @@ * date time literal. 
*/ public class DateTimeLiteral extends DateLiteral { + public static final DateTimeLiteral MIN_DATETIME = new DateTimeLiteral(0000, 1, 1, 0, 0, 0); + public static final DateTimeLiteral MAX_DATETIME = new DateTimeLiteral(9999, 12, 31, 23, 59, 59); protected static final int MAX_MICROSECOND = 999999; - private static final DateTimeLiteral MIN_DATETIME = new DateTimeLiteral(0000, 1, 1, 0, 0, 0); - private static final DateTimeLiteral MAX_DATETIME = new DateTimeLiteral(9999, 12, 31, 23, 59, 59); - private static final Logger LOG = LogManager.getLogger(DateTimeLiteral.class); protected long hour; @@ -133,7 +132,7 @@ public static int determineScale(String s) { /** parseDateTimeLiteral */ public static Result parseDateTimeLiteral(String s, boolean isV2) { - Result parseResult = parseDateTime(s); + Result parseResult = parseDateTime(s); if (parseResult.isError()) { return parseResult.cast(); } @@ -267,7 +266,7 @@ public double getDouble() { } @Override - public String toSql() { + public String computeToSql() { return "'" + getStringValue() + "'"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java index 4ffc92c634d709..1f0aa788cdc641 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java @@ -112,7 +112,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { return value.toPlainString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java index d8be4faf0c9395..045da28bdb38a4 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java @@ -152,7 +152,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { return value.toPlainString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java index e8e37aaf697e24..69e61b03c82314 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java @@ -135,7 +135,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return toString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java index c57bd3a04875e1..dbcf74c971e069 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java @@ -114,7 +114,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append("map("); if (!keys.isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java index ce1278a9ad4b26..763fdfb1f4f1f8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java @@ -38,7 +38,7 @@ public LiteralExpr toLegacyLiteral() { } @Override - public String toSql() { + public String computeToSql() { return "MAX_VALUE"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java index 3a46f1f5b83e7e..f44aa663c9eb03 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java @@ -124,7 +124,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append("STRUCT("); for (int i = 0; i < fields.size(); i++) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java index 46771392e59cd9..77eef860b98a77 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java @@ -17,11 +17,20 @@ package org.apache.doris.nereids.trees.plans; +import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; +import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.qe.ConnectContext; +import java.util.Optional; + /** * plan can be explained. 
*/ public interface Explainable { Plan getExplainPlan(ConnectContext ctx) throws Exception; + + default Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) throws Exception { + return Optional.empty(); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index dfc129f10b0fd6..2860ec10092312 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -43,6 +43,7 @@ public enum PlanType { LOGICAL_UNBOUND_ONE_ROW_RELATION, LOGICAL_UNBOUND_RELATION, LOGICAL_UNBOUND_TVF_RELATION, + LOGICAL_UNBOUND_INLINE_TABLE, // logical sinks LOGICAL_FILE_SINK, @@ -266,5 +267,6 @@ public enum PlanType { CREATE_ROUTINE_LOAD_COMMAND, SHOW_TABLE_CREATION_COMMAND, SHOW_QUERY_PROFILE_COMMAND, - SWITCH_COMMAND + SWITCH_COMMAND, + USE_COMMAND } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java new file mode 100644 index 00000000000000..0aded14ca77119 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java @@ -0,0 +1,28 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.algebra; + +import org.apache.doris.nereids.trees.expressions.NamedExpression; +import org.apache.doris.nereids.trees.plans.LeafPlan; + +import java.util.List; + +/** InlineTable */ +public interface InlineTable extends LeafPlan { + List> getConstantExprsList(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java index e3f2f1d732ae5a..ea805f6cb0ceb6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java @@ -79,12 +79,16 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { if (!(logicalPlan instanceof Explainable)) { throw new AnalysisException(logicalPlan.getClass().getSimpleName() + " cannot be explained"); } - explainPlan = ((LogicalPlan) ((Explainable) logicalPlan).getExplainPlan(ctx)); + Explainable explainable = (Explainable) logicalPlan; + explainPlan = ((LogicalPlan) explainable.getExplainPlan(ctx)); + NereidsPlanner planner = explainable.getExplainPlanner(explainPlan, ctx.getStatementContext()).orElseGet(() -> + new NereidsPlanner(ctx.getStatementContext()) + ); + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(explainPlan, ctx.getStatementContext()); ExplainOptions explainOptions = new ExplainOptions(level, showPlanProcess); 
logicalPlanAdapter.setIsExplain(explainOptions); executor.setParsedStmt(logicalPlanAdapter); - NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); if (ctx.getSessionVariable().isEnableMaterializedViewRewrite()) { ctx.getStatementContext().addPlannerHook(InitMaterializationContextHook.INSTANCE); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/RecoverPartitionCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/RecoverPartitionCommand.java similarity index 100% rename from fe/fe-core/src/main/java/org/apache/doris/nereids/RecoverPartitionCommand.java rename to fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/RecoverPartitionCommand.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java index c2e9abd2f0f97c..8624bed9ceefae 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java @@ -25,8 +25,8 @@ import org.apache.doris.analysis.FunctionParams; import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.StringLiteral; +import org.apache.doris.catalog.PartitionType; import org.apache.doris.common.DdlException; -import org.apache.doris.datasource.hive.HMSExternalTable; import org.apache.doris.mtmv.MTMVPartitionExprFactory; import org.apache.doris.mtmv.MTMVPartitionInfo; import org.apache.doris.mtmv.MTMVPartitionInfo.MTMVPartitionType; @@ -136,9 +136,9 @@ private RelatedTableInfo getRelatedTableInfo(NereidsPlanner planner, String part if (!partitionColumnNames.contains(relatedTableInfo.getColumn())) { throw new AnalysisException("error related column: " + relatedTableInfo.getColumn()); } - if (!(mtmvBaseRelatedTable 
instanceof HMSExternalTable) + if (!(mtmvBaseRelatedTable.getPartitionType(Optional.empty()).equals(PartitionType.LIST)) && partitionColumnNames.size() != 1) { - throw new AnalysisException("only hms table support multi column partition."); + throw new AnalysisException("only List PartitionType support multi column partition."); } return relatedTableInfo; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java index b4a7a9eee3a148..4fb42a21fd780d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java @@ -26,19 +26,22 @@ import org.apache.doris.common.ErrorCode; import org.apache.doris.common.ErrorReport; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; +import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.trees.TreeNode; import org.apache.doris.nereids.trees.expressions.ExprId; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.plans.Explainable; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.commands.Command; import org.apache.doris.nereids.trees.plans.commands.NoForward; -import 
org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.physical.PhysicalOlapTableSink; import org.apache.doris.nereids.trees.plans.physical.PhysicalOneRowRelation; @@ -69,16 +72,34 @@ public class BatchInsertIntoTableCommand extends Command implements NoForward, E public static final Logger LOG = LogManager.getLogger(BatchInsertIntoTableCommand.class); - private LogicalPlan logicalQuery; + private LogicalPlan originLogicalQuery; + private Optional logicalQuery; public BatchInsertIntoTableCommand(LogicalPlan logicalQuery) { super(PlanType.BATCH_INSERT_INTO_TABLE_COMMAND); - this.logicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.originLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.logicalQuery = Optional.empty(); + } + + public LogicalPlan getLogicalQuery() { + return logicalQuery.orElse(originLogicalQuery); } @Override public Plan getExplainPlan(ConnectContext ctx) throws Exception { - return InsertUtils.getPlanForExplain(ctx, this.logicalQuery); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + return InsertUtils.getPlanForExplain(ctx, analyzeContext, getLogicalQuery()); + } + + @Override + public Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) throws Exception { + ConnectContext connectContext = ctx.getConnectContext(); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, connectContext); + boolean supportFastInsertIntoValues + = InsertUtils.supportFastInsertIntoValues(logicalPlan, targetTableIf, connectContext); + return Optional.of(new FastInsertIntoValuesPlanner(ctx, supportFastInsertIntoValues)); } @Override @@ -88,19 +109,32 @@ public R accept(PlanVisitor visitor, C context) { @Override public void 
run(ConnectContext ctx, StmtExecutor executor) throws Exception { - UnboundTableSink unboundTableSink = (UnboundTableSink) logicalQuery; + UnboundTableSink unboundTableSink = (UnboundTableSink) originLogicalQuery; Plan query = unboundTableSink.child(); - if (!(query instanceof LogicalInlineTable)) { + if (!(query instanceof InlineTable)) { throw new AnalysisException("Insert into ** select is not supported in a transaction"); } PhysicalOlapTableSink sink; - TableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, ctx); targetTableIf.readLock(); try { - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf, Optional.empty()); - LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); - NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); + StatementContext statementContext = ctx.getStatementContext(); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(statementContext, originLogicalQuery, PhysicalProperties.ANY) + ); + + this.logicalQuery = Optional.of((LogicalPlan) InsertUtils.normalizePlan( + originLogicalQuery, targetTableIf, analyzeContext, Optional.empty() + )); + + LogicalPlan logicalQuery = this.logicalQuery.get(); + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, statementContext); + + boolean supportFastInsertIntoValues + = InsertUtils.supportFastInsertIntoValues(logicalQuery, targetTableIf, ctx); + FastInsertIntoValuesPlanner planner = new FastInsertIntoValuesPlanner( + statementContext, supportFastInsertIntoValues, true); planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift()); executor.checkBlockRules(); if (ctx.getConnectType() == ConnectType.MYSQL && ctx.getMysqlChannel() != null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java new file mode 100644 index 00000000000000..18dcbf25d28aa6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java @@ -0,0 +1,166 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.trees.plans.commands.insert; + +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; +import org.apache.doris.nereids.memo.Group; +import org.apache.doris.nereids.memo.GroupId; +import org.apache.doris.nereids.properties.PhysicalProperties; +import org.apache.doris.nereids.rules.Rule; +import org.apache.doris.nereids.rules.implementation.LogicalOlapTableSinkToPhysicalOlapTableSink; +import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.logical.LogicalOlapTableSink; +import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; +import org.apache.doris.nereids.trees.plans.logical.LogicalProject; +import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; +import org.apache.doris.nereids.trees.plans.physical.PhysicalOneRowRelation; +import org.apache.doris.nereids.trees.plans.physical.PhysicalPlan; +import org.apache.doris.nereids.trees.plans.physical.PhysicalProject; +import org.apache.doris.nereids.trees.plans.physical.PhysicalUnion; +import org.apache.doris.nereids.trees.plans.visitor.DefaultPlanRewriter; + +import java.util.concurrent.atomic.AtomicReference; + +/** FastInsertIntoValuesPlanner */ +public class FastInsertIntoValuesPlanner extends NereidsPlanner { + private static final Rule toPhysicalOlapTableSink = new LogicalOlapTableSinkToPhysicalOlapTableSink() + .build(); + protected final boolean fastInsertIntoValues; + protected final boolean batchInsert; + private final AtomicReference rootGroupRef = new AtomicReference<>(); + + public FastInsertIntoValuesPlanner(StatementContext statementContext, boolean fastInsertIntoValues) { + this(statementContext, fastInsertIntoValues, false); + } + + public FastInsertIntoValuesPlanner( + StatementContext statementContext, boolean fastInsertIntoValues, boolean batchInsert) { + super(statementContext); + this.fastInsertIntoValues 
= fastInsertIntoValues; + this.batchInsert = batchInsert; + } + + @Override + protected void analyze(boolean showPlanProcess) { + if (!fastInsertIntoValues) { + super.analyze(showPlanProcess); + return; + } + CascadesContext cascadesContext = getCascadesContext(); + keepOrShowPlanProcess(showPlanProcess, () -> { + InsertIntoValuesAnalyzer analyzer = new InsertIntoValuesAnalyzer(cascadesContext, batchInsert); + analyzer.execute(); + }); + } + + @Override + protected void rewrite(boolean showPlanProcess) { + if (!fastInsertIntoValues) { + super.rewrite(showPlanProcess); + } + } + + @Override + protected void optimize() { + if (!fastInsertIntoValues) { + super.optimize(); + return; + } + + DefaultPlanRewriter optimizer = new DefaultPlanRewriter() { + @Override + public Plan visitLogicalUnion(LogicalUnion logicalUnion, Void context) { + logicalUnion = (LogicalUnion) super.visitLogicalUnion(logicalUnion, context); + + return new PhysicalUnion(logicalUnion.getQualifier(), + logicalUnion.getOutputs(), + logicalUnion.getRegularChildrenOutputs(), + logicalUnion.getConstantExprsList(), + logicalUnion.getLogicalProperties(), + logicalUnion.children() + ); + } + + @Override + public Plan visitLogicalOneRowRelation(LogicalOneRowRelation oneRowRelation, Void context) { + return new PhysicalOneRowRelation( + oneRowRelation.getRelationId(), + oneRowRelation.getProjects(), + oneRowRelation.getLogicalProperties()); + } + + @Override + public Plan visitLogicalProject(LogicalProject logicalProject, Void context) { + logicalProject = + (LogicalProject) super.visitLogicalProject(logicalProject, context); + + return new PhysicalProject<>( + logicalProject.getProjects(), + logicalProject.getLogicalProperties(), + logicalProject.child() + ); + } + + @Override + public Plan visitLogicalOlapTableSink(LogicalOlapTableSink olapTableSink, + Void context) { + olapTableSink = + (LogicalOlapTableSink) super.visitLogicalOlapTableSink(olapTableSink, context); + return toPhysicalOlapTableSink + 
.transform(olapTableSink, getCascadesContext()) + .get(0); + } + }; + + PhysicalPlan physicalPlan = + (PhysicalPlan) getCascadesContext().getRewritePlan().accept(optimizer, null); + + super.physicalPlan = physicalPlan; + + GroupId rootGroupId = GroupId.createGenerator().getNextId(); + Group rootGroup = new Group(rootGroupId, physicalPlan.getLogicalProperties()); + rootGroupRef.set(rootGroup); + } + + @Override + public Group getRoot() { + if (!fastInsertIntoValues) { + return super.getRoot(); + } + return rootGroupRef.get(); + } + + @Override + protected PhysicalPlan chooseNthPlan( + Group rootGroup, PhysicalProperties physicalProperties, int nthPlan) { + if (!fastInsertIntoValues) { + return super.chooseNthPlan(rootGroup, physicalProperties, nthPlan); + } + return super.physicalPlan; + } + + @Override + protected PhysicalPlan postProcess(PhysicalPlan physicalPlan) { + if (!fastInsertIntoValues) { + return super.postProcess(physicalPlan); + } + return physicalPlan; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java index 10f9947974cdb0..96d5d56a7e10ff 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java @@ -31,11 +31,13 @@ import org.apache.doris.datasource.jdbc.JdbcExternalTable; import org.apache.doris.load.loadv2.LoadStatistic; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; 
+import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.plans.Explainable; import org.apache.doris.nereids.trees.plans.Plan; @@ -85,8 +87,8 @@ public class InsertIntoTableCommand extends Command implements ForwardWithSync, public static final Logger LOG = LogManager.getLogger(InsertIntoTableCommand.class); - private LogicalPlan originalLogicalQuery; - private LogicalPlan logicalQuery; + private LogicalPlan originLogicalQuery; + private Optional logicalQuery; private Optional labelName; /** * When source it's from job scheduler,it will be set. @@ -101,15 +103,15 @@ public class InsertIntoTableCommand extends Command implements ForwardWithSync, public InsertIntoTableCommand(LogicalPlan logicalQuery, Optional labelName, Optional insertCtx, Optional cte) { super(PlanType.INSERT_INTO_TABLE_COMMAND); - this.originalLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); - this.logicalQuery = originalLogicalQuery; + this.originLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); this.labelName = Objects.requireNonNull(labelName, "labelName should not be null"); + this.logicalQuery = Optional.empty(); this.insertCtx = insertCtx; this.cte = cte; } public LogicalPlan getLogicalQuery() { - return logicalQuery; + return logicalQuery.orElse(originLogicalQuery); } public Optional getLabelName() { @@ -149,7 +151,7 @@ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor */ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor stmtExecutor, boolean needBeginTransaction) throws Exception { - List qualifiedTargetTableName = InsertUtils.getTargetTableQualified(logicalQuery, ctx); + List qualifiedTargetTableName = InsertUtils.getTargetTableQualified(originLogicalQuery, ctx); AbstractInsertExecutor insertExecutor; int retryTimes = 0; @@ -214,8 +216,6 @@ public 
AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor stmtExec // so we need to set this here insertExecutor.getCoordinator().setTxnId(insertExecutor.getTxnId()); stmtExecutor.setCoord(insertExecutor.getCoordinator()); - // for prepare and execute, avoiding normalization for every execute command - this.originalLogicalQuery = this.logicalQuery; return insertExecutor; } LOG.warn("insert plan failed {} times. query id is {}.", retryTimes, DebugUtil.printId(ctx.queryId())); @@ -226,17 +226,23 @@ private BuildInsertExecutorResult initPlanOnce(ConnectContext ctx, StmtExecutor stmtExecutor, TableIf targetTableIf) throws Throwable { targetTableIf.readLock(); try { + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); // process inline table (default values, empty values) - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(originalLogicalQuery, targetTableIf, insertCtx); + this.logicalQuery = Optional.of((LogicalPlan) InsertUtils.normalizePlan( + originLogicalQuery, targetTableIf, analyzeContext, insertCtx + )); if (cte.isPresent()) { - this.logicalQuery = ((LogicalPlan) cte.get().withChildren(logicalQuery)); + this.logicalQuery = Optional.of((LogicalPlan) cte.get().withChildren(logicalQuery.get())); } - OlapGroupCommitInsertExecutor.analyzeGroupCommit(ctx, targetTableIf, this.logicalQuery, this.insertCtx); + OlapGroupCommitInsertExecutor.analyzeGroupCommit( + ctx, targetTableIf, this.logicalQuery.get(), this.insertCtx); } finally { targetTableIf.readUnlock(); } - LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery.get(), ctx.getStatementContext()); return planInsertExecutor(ctx, stmtExecutor, logicalPlanAdapter, targetTableIf); } @@ -362,6 +368,9 @@ private ExecutorFactory selectInsertExecutorFactory( private 
BuildInsertExecutorResult planInsertExecutor( ConnectContext ctx, StmtExecutor stmtExecutor, LogicalPlanAdapter logicalPlanAdapter, TableIf targetTableIf) throws Throwable { + LogicalPlan logicalPlan = logicalPlanAdapter.getLogicalPlan(); + + boolean supportFastInsertIntoValues = InsertUtils.supportFastInsertIntoValues(logicalPlan, targetTableIf, ctx); // the key logical when use new coordinator: // 1. use NereidsPlanner to generate PhysicalPlan // 2. use PhysicalPlan to select InsertExecutorFactory, some InsertExecutors want to control @@ -372,10 +381,9 @@ private BuildInsertExecutorResult planInsertExecutor( // 3. NereidsPlanner use PhysicalPlan and the provided backend to generate DistributePlan // 4. ExecutorFactory use the DistributePlan to generate the NereidsSqlCoordinator and InsertExecutor - StatementContext statementContext = ctx.getStatementContext(); - AtomicReference executorFactoryRef = new AtomicReference<>(); - NereidsPlanner planner = new NereidsPlanner(statementContext) { + FastInsertIntoValuesPlanner planner = new FastInsertIntoValuesPlanner( + ctx.getStatementContext(), supportFastInsertIntoValues) { @Override protected void doDistribute(boolean canUseNereidsDistributePlanner) { // when enter this method, the step 1 already executed @@ -406,12 +414,24 @@ private void runInternal(ConnectContext ctx, StmtExecutor executor) throws Excep } public boolean isExternalTableSink() { - return !(logicalQuery instanceof UnboundTableSink); + return !(getLogicalQuery() instanceof UnboundTableSink); } @Override public Plan getExplainPlan(ConnectContext ctx) { - return InsertUtils.getPlanForExplain(ctx, this.logicalQuery); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + return InsertUtils.getPlanForExplain(ctx, analyzeContext, getLogicalQuery()); + } + + @Override + public Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) { + 
ConnectContext connectContext = ctx.getConnectContext(); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, connectContext); + boolean supportFastInsertIntoValues + = InsertUtils.supportFastInsertIntoValues(logicalPlan, targetTableIf, connectContext); + return Optional.of(new FastInsertIntoValuesPlanner(ctx, supportFastInsertIntoValues)); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java new file mode 100644 index 00000000000000..1c630a41c846fe --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java @@ -0,0 +1,156 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.trees.plans.commands.insert; + +import org.apache.doris.common.Pair; +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.jobs.executor.AbstractBatchJobExecutor; +import org.apache.doris.nereids.jobs.rewrite.RewriteJob; +import org.apache.doris.nereids.rules.Rule; +import org.apache.doris.nereids.rules.RuleType; +import org.apache.doris.nereids.rules.analysis.BindSink; +import org.apache.doris.nereids.rules.expression.ExpressionRewrite; +import org.apache.doris.nereids.rules.expression.ExpressionRewriteRule; +import org.apache.doris.nereids.rules.expression.rules.ConvertAggStateCast; +import org.apache.doris.nereids.rules.expression.rules.FoldConstantRuleOnFE; +import org.apache.doris.nereids.rules.rewrite.MergeProjects; +import org.apache.doris.nereids.rules.rewrite.OneRewriteRuleFactory; +import org.apache.doris.nereids.rules.rewrite.PushProjectIntoOneRowRelation; +import org.apache.doris.nereids.rules.rewrite.PushProjectIntoUnion; +import org.apache.doris.nereids.trees.expressions.NamedExpression; +import org.apache.doris.nereids.trees.expressions.SlotReference; +import org.apache.doris.nereids.trees.expressions.StatementScopeIdGenerator; +import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; +import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; +import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; +import org.apache.doris.nereids.types.DataType; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; + +/** InsertIntoValuesAnalyzer */ +public class InsertIntoValuesAnalyzer extends AbstractBatchJobExecutor { + public static final List INSERT_JOBS = jobs( + bottomUp( + new InlineTableToUnionOrOneRowRelation(), + new BindSink(), + new MergeProjects(), + // after bind olap table sink, the LogicalProject will be generated 
under LogicalOlapTableSink, + // we should convert the agg state function in the project, and evaluate some env parameters + // like encrypt key reference, for example: `values (aes_encrypt("abc",key test.my_key))`, + // we should replace the `test.my_key` to real key + new RewriteInsertIntoExpressions(ExpressionRewrite.bottomUp( + ConvertAggStateCast.INSTANCE, + FoldConstantRuleOnFE.PATTERN_MATCH_INSTANCE + )) + ) + ); + + public static final List BATCH_INSERT_JOBS = jobs( + bottomUp( + new InlineTableToUnionOrOneRowRelation(), + new BindSink(), + new MergeProjects(), + + // the BatchInsertIntoTableCommand need send StringLiteral to backend, + // and only support alias(literal as xx) or alias(cast(literal as xx)), + // but not support alias(cast(slotRef as xx)) which create in BindSink, + // we should push down the cast into Union or OneRowRelation. + // the InsertIntoTableCommand support translate slotRef in the TPlan, + // so we don't need this rules, just evaluate in backend + new PushProjectIntoUnion(), + new PushProjectIntoOneRowRelation(), + + new RewriteBatchInsertIntoExpressions(ExpressionRewrite.bottomUp( + ConvertAggStateCast.INSTANCE, + FoldConstantRuleOnFE.PATTERN_MATCH_INSTANCE + )) + ) + ); + + private final boolean batchInsert; + + public InsertIntoValuesAnalyzer(CascadesContext cascadesContext, boolean batchInsert) { + super(cascadesContext); + this.batchInsert = batchInsert; + } + + @Override + public List getJobs() { + return batchInsert ? BATCH_INSERT_JOBS : INSERT_JOBS; + } + + // we only rewrite the project's expression + private static class RewriteInsertIntoExpressions extends ExpressionRewrite { + public RewriteInsertIntoExpressions(ExpressionRewriteRule... 
rules) { + super(rules); + } + + @Override + public List buildRules() { + return ImmutableList.of( + new ProjectExpressionRewrite().build() + ); + } + } + + // we only rewrite the project's and one row relation expression + private static class RewriteBatchInsertIntoExpressions extends ExpressionRewrite { + public RewriteBatchInsertIntoExpressions(ExpressionRewriteRule... rules) { + super(rules); + } + + @Override + public List buildRules() { + return ImmutableList.of( + new ProjectExpressionRewrite().build(), + new OneRowRelationExpressionRewrite().build() + ); + } + } + + private static class InlineTableToUnionOrOneRowRelation extends OneRewriteRuleFactory { + @Override + public Rule build() { + return inlineTable().then(inlineTable -> { + List> originConstants = inlineTable.getConstantExprsList(); + if (originConstants.size() > 1) { + Pair>, List> castedConstantsAndNullables + = LogicalUnion.castCommonDataTypeAndNullableByConstants(originConstants); + List> castedRows = castedConstantsAndNullables.key(); + List nullables = castedConstantsAndNullables.value(); + List outputs = Lists.newArrayList(); + List firstRow = originConstants.get(0); + for (int columnId = 0; columnId < firstRow.size(); columnId++) { + String name = firstRow.get(columnId).getName(); + DataType commonDataType = castedRows.get(0).get(columnId).getDataType(); + outputs.add(new SlotReference(name, commonDataType, nullables.get(columnId))); + } + return new LogicalUnion(Qualifier.ALL, castedRows, ImmutableList.of()).withNewOutputs(outputs); + } else if (originConstants.size() == 1) { + return new LogicalOneRowRelation(StatementScopeIdGenerator.newRelationId(), originConstants.get(0)); + } else { + throw new AnalysisException("Illegal inline table with empty constants"); + } + }).toRule(RuleType.LOGICAL_INLINE_TABLE_TO_LOGICAL_UNION_OR_ONE_ROW_RELATION); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java index c89a4fc7be96ee..68c71de2d9e8b8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java @@ -32,13 +32,16 @@ import org.apache.doris.insertoverwrite.InsertOverwriteUtil; import org.apache.doris.mtmv.MTMVUtil; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundHiveTableSink; import org.apache.doris.nereids.analyzer.UnboundIcebergTableSink; import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.analyzer.UnboundTableSinkCreator; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; +import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.trees.TreeNode; import org.apache.doris.nereids.trees.plans.Explainable; import org.apache.doris.nereids.trees.plans.Plan; @@ -82,7 +85,8 @@ public class InsertOverwriteTableCommand extends Command implements ForwardWithS private static final Logger LOG = LogManager.getLogger(InsertOverwriteTableCommand.class); - private LogicalPlan logicalQuery; + private LogicalPlan originLogicalQuery; + private Optional logicalQuery; private Optional labelName; private final Optional cte; private AtomicBoolean isCancelled = new AtomicBoolean(false); @@ -94,7 +98,8 @@ public class InsertOverwriteTableCommand extends Command implements ForwardWithS public InsertOverwriteTableCommand(LogicalPlan logicalQuery, Optional labelName, Optional cte) { super(PlanType.INSERT_INTO_TABLE_COMMAND); - this.logicalQuery = 
Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.originLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.logicalQuery = Optional.empty(); this.labelName = Objects.requireNonNull(labelName, "labelName should not be null"); this.cte = cte; } @@ -103,14 +108,18 @@ public void setLabelName(Optional labelName) { this.labelName = labelName; } - public boolean isAutoDetectOverwrite() { + public boolean isAutoDetectOverwrite(LogicalPlan logicalQuery) { return (logicalQuery instanceof UnboundTableSink) - && ((UnboundTableSink) this.logicalQuery).isAutoDetectPartition(); + && ((UnboundTableSink) logicalQuery).isAutoDetectPartition(); + } + + public LogicalPlan getLogicalQuery() { + return logicalQuery.orElse(originLogicalQuery); } @Override public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { - TableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, ctx); //check allow insert overwrite if (!allowInsertOverwrite(targetTableIf)) { String errMsg = "insert into overwrite only support OLAP and HMS/ICEBERG table." 
@@ -122,12 +131,20 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { if (targetTableIf instanceof MTMV && !MTMVUtil.allowModifyMTMVData(ctx)) { throw new AnalysisException("Not allowed to perform current operation on async materialized view"); } - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf, Optional.empty()); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + this.logicalQuery = Optional.of((LogicalPlan) InsertUtils.normalizePlan( + originLogicalQuery, targetTableIf, analyzeContext, Optional.empty())); if (cte.isPresent()) { - this.logicalQuery = (LogicalPlan) logicalQuery.withChildren(cte.get().withChildren( - this.logicalQuery.child(0))); + LogicalPlan logicalQuery = this.logicalQuery.get(); + this.logicalQuery = Optional.of( + (LogicalPlan) logicalQuery.withChildren( + cte.get().withChildren(logicalQuery.child(0)) + ) + ); } - + LogicalPlan logicalQuery = this.logicalQuery.get(); LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift()); @@ -172,7 +189,7 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { isRunning.set(true); long taskId = 0; try { - if (isAutoDetectOverwrite()) { + if (isAutoDetectOverwrite(getLogicalQuery())) { // taskId here is a group id. it contains all replace tasks made and registered in rpc process. taskId = insertOverwriteManager.registerTaskGroup(); // When inserting, BE will call to replace partition by FrontendService. 
FE will register new temp @@ -219,7 +236,7 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { } } catch (Exception e) { LOG.warn("insert into overwrite failed with task(or group) id " + taskId); - if (isAutoDetectOverwrite()) { + if (isAutoDetectOverwrite(getLogicalQuery())) { insertOverwriteManager.taskGroupFail(taskId); } else { insertOverwriteManager.taskFail(taskId); @@ -287,6 +304,7 @@ private void insertIntoPartitions(ConnectContext ctx, StmtExecutor executor, Lis // copy sink tot replace by tempPartitions UnboundLogicalSink copySink; InsertCommandContext insertCtx; + LogicalPlan logicalQuery = getLogicalQuery(); if (logicalQuery instanceof UnboundTableSink) { UnboundTableSink sink = (UnboundTableSink) logicalQuery; copySink = (UnboundLogicalSink) UnboundTableSinkCreator.createUnboundTableSink( @@ -342,6 +360,7 @@ private void insertIntoPartitions(ConnectContext ctx, StmtExecutor executor, Lis */ private void insertIntoAutoDetect(ConnectContext ctx, StmtExecutor executor, long groupId) throws Exception { InsertCommandContext insertCtx; + LogicalPlan logicalQuery = getLogicalQuery(); if (logicalQuery instanceof UnboundTableSink) { // 1. when overwrite auto-detect, allow auto partition or not is controlled by session variable. // 2. 
we save and pass overwrite auto detect by insertCtx @@ -362,7 +381,23 @@ private void insertIntoAutoDetect(ConnectContext ctx, StmtExecutor executor, lon @Override public Plan getExplainPlan(ConnectContext ctx) { - return InsertUtils.getPlanForExplain(ctx, this.logicalQuery); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + return InsertUtils.getPlanForExplain(ctx, analyzeContext, getLogicalQuery()); + } + + @Override + public Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) { + LogicalPlan logicalQuery = getLogicalQuery(); + if (logicalQuery instanceof UnboundTableSink) { + boolean allowAutoPartition = ctx.getConnectContext().getSessionVariable().isEnableAutoCreateWhenOverwrite(); + OlapInsertCommandContext insertCtx = new OlapInsertCommandContext(allowAutoPartition, true); + InsertIntoTableCommand insertIntoTableCommand = new InsertIntoTableCommand( + logicalQuery, labelName, Optional.of(insertCtx), Optional.empty()); + return insertIntoTableCommand.getExplainPlanner(logicalPlan, ctx); + } + return Optional.empty(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java index 459ffcd04f894a..497a287e802823 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java @@ -29,30 +29,40 @@ import org.apache.doris.common.FormatOptions; import org.apache.doris.datasource.hive.HMSExternalTable; import org.apache.doris.datasource.jdbc.JdbcExternalTable; +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.analyzer.Scope; import org.apache.doris.nereids.analyzer.UnboundAlias; +import 
org.apache.doris.nereids.analyzer.UnboundFunction; import org.apache.doris.nereids.analyzer.UnboundHiveTableSink; import org.apache.doris.nereids.analyzer.UnboundIcebergTableSink; +import org.apache.doris.nereids.analyzer.UnboundInlineTable; import org.apache.doris.nereids.analyzer.UnboundJdbcTableSink; -import org.apache.doris.nereids.analyzer.UnboundOneRowRelation; +import org.apache.doris.nereids.analyzer.UnboundSlot; +import org.apache.doris.nereids.analyzer.UnboundStar; import org.apache.doris.nereids.analyzer.UnboundTableSink; +import org.apache.doris.nereids.analyzer.UnboundVariable; import org.apache.doris.nereids.exceptions.AnalysisException; -import org.apache.doris.nereids.parser.LogicalPlanBuilder; import org.apache.doris.nereids.parser.NereidsParser; +import org.apache.doris.nereids.properties.PhysicalProperties; +import org.apache.doris.nereids.rules.analysis.ExpressionAnalyzer; +import org.apache.doris.nereids.rules.expression.ExpressionRewriteContext; +import org.apache.doris.nereids.rules.expression.rules.ConvertAggStateCast; +import org.apache.doris.nereids.rules.expression.rules.FoldConstantRuleOnFE; import org.apache.doris.nereids.trees.expressions.Alias; import org.apache.doris.nereids.trees.expressions.Cast; import org.apache.doris.nereids.trees.expressions.DefaultValueSlot; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; -import org.apache.doris.nereids.trees.expressions.StatementScopeIdGenerator; import org.apache.doris.nereids.trees.expressions.literal.ArrayLiteral; import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; import org.apache.doris.nereids.trees.plans.Plan; -import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import 
org.apache.doris.nereids.trees.plans.commands.info.DMLCommandType; import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.UnboundLogicalSink; +import org.apache.doris.nereids.types.AggStateType; import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.nereids.util.TypeCoercionUtils; @@ -79,7 +89,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; @@ -260,7 +272,9 @@ private static void beginBatchInsertTransaction(ConnectContext ctx, /** * normalize plan to let it could be process correctly by nereids */ - public static Plan normalizePlan(Plan plan, TableIf table, Optional insertCtx) { + public static Plan normalizePlan(LogicalPlan plan, TableIf table, + Optional analyzeContext, + Optional insertCtx) { UnboundLogicalSink unboundLogicalSink = (UnboundLogicalSink) plan; if (table instanceof HMSExternalTable) { HMSExternalTable hiveTable = (HMSExternalTable) table; @@ -334,21 +348,39 @@ public static Plan normalizePlan(Plan plan, TableIf table, Optional oneRowRelationBuilder = ImmutableList.builder(); + + UnboundInlineTable unboundInlineTable = (UnboundInlineTable) query; + ImmutableList.Builder> optimizedRowConstructors + = ImmutableList.builderWithExpectedSize(unboundInlineTable.getConstantExprsList().size()); List columns = table.getBaseSchema(false); - for (List values : logicalInlineTable.getConstantExprsList()) { - ImmutableList.Builder constantExprs = ImmutableList.builder(); + ConnectContext context = ConnectContext.get(); + ExpressionRewriteContext rewriteContext = null; + if (context != null && context.getStatementContext() != null) 
{ + rewriteContext = new ExpressionRewriteContext( + CascadesContext.initContext( + context.getStatementContext(), unboundInlineTable, PhysicalProperties.ANY + ) + ); + } + + Optional analyzer = analyzeContext.map( + cascadesContext -> buildExprAnalyzer(plan, cascadesContext) + ); + + for (List values : unboundInlineTable.getConstantExprsList()) { + ImmutableList.Builder optimizedRowConstructor = ImmutableList.builder(); if (values.isEmpty()) { if (CollectionUtils.isNotEmpty(unboundLogicalSink.getColNames())) { throw new AnalysisException("value list should not be empty if columns are specified"); } - for (Column column : columns) { - constantExprs.add(generateDefaultExpression(column)); + for (int i = 0; i < columns.size(); i++) { + Column column = columns.get(i); + NamedExpression defaultExpression = generateDefaultExpression(column); + addColumnValue(analyzer, optimizedRowConstructor, defaultExpression); } } else { if (CollectionUtils.isNotEmpty(unboundLogicalSink.getColNames())) { @@ -374,10 +406,15 @@ public static Plan normalizePlan(Plan plan, TableIf table, Optional oneRowRelations = oneRowRelationBuilder.build(); - if (oneRowRelations.size() == 1) { - return plan.withChildren(oneRowRelations.get(0)); - } else { - return plan.withChildren( - LogicalPlanBuilder.reduceToLogicalPlanTree(0, oneRowRelations.size() - 1, - oneRowRelations, Qualifier.ALL)); + return plan.withChildren(new LogicalInlineTable(optimizedRowConstructors.build())); + } + + /** buildAnalyzer */ + public static ExpressionAnalyzer buildExprAnalyzer(Plan plan, CascadesContext analyzeContext) { + return new ExpressionAnalyzer(plan, new Scope(ImmutableList.of()), + analyzeContext, false, false) { + @Override + public Expression visitCast(Cast cast, ExpressionRewriteContext context) { + Expression expr = super.visitCast(cast, context); + if (expr instanceof Cast) { + if (expr.child(0).getDataType() instanceof AggStateType) { + expr = ConvertAggStateCast.convert((Cast) expr); + } else { + expr = 
FoldConstantRuleOnFE.evaluate(expr, context); + } + } + return expr; + } + + @Override + public Expression visitUnboundFunction(UnboundFunction unboundFunction, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundFunction(unboundFunction, context); + if (expr instanceof UnboundFunction) { + throw new IllegalStateException("Can not analyze function " + unboundFunction.getName()); + } + return expr; + } + + @Override + public Expression visitUnboundSlot(UnboundSlot unboundSlot, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundSlot(unboundSlot, context); + if (expr instanceof UnboundFunction) { + throw new AnalysisException("Can not analyze slot " + unboundSlot.getName()); + } + return expr; + } + + @Override + public Expression visitUnboundVariable(UnboundVariable unboundVariable, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundVariable(unboundVariable, context); + if (expr instanceof UnboundVariable) { + throw new AnalysisException("Can not analyze variable " + unboundVariable.getName()); + } + return expr; + } + + @Override + public Expression visitUnboundAlias(UnboundAlias unboundAlias, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundAlias(unboundAlias, context); + if (expr instanceof UnboundVariable) { + throw new AnalysisException("Can not analyze alias"); + } + return expr; + } + + @Override + public Expression visitUnboundStar(UnboundStar unboundStar, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundStar(unboundStar, context); + if (expr instanceof UnboundStar) { + List qualifier = unboundStar.getQualifier(); + List qualified = new ArrayList<>(qualifier); + qualified.add("*"); + throw new AnalysisException("Can not analyze " + StringUtils.join(qualified, ".")); + } + return expr; + } + }; + } + + private static void addColumnValue( + Optional analyzer, + ImmutableList.Builder optimizedRowConstructor, + NamedExpression value) { + if 
(analyzer.isPresent() && !(value instanceof Alias && value.child(0) instanceof Literal)) { + ExpressionAnalyzer expressionAnalyzer = analyzer.get(); + value = (NamedExpression) expressionAnalyzer.analyze( + value, new ExpressionRewriteContext(expressionAnalyzer.getCascadesContext()) + ); } + optimizedRowConstructor.add(value); } private static Expression castValue(Expression value, DataType targetType) { - if (value instanceof UnboundAlias) { - return value.withChildren(TypeCoercionUtils.castUnbound(((UnboundAlias) value).child(), targetType)); + if (value instanceof Alias) { + Expression oldChild = value.child(0); + Expression newChild = TypeCoercionUtils.castUnbound(oldChild, targetType); + return oldChild == newChild ? value : value.withChildren(newChild); + } else if (value instanceof UnboundAlias) { + UnboundAlias unboundAlias = (UnboundAlias) value; + return new Alias(TypeCoercionUtils.castUnbound(unboundAlias.child(), targetType)); } else { return TypeCoercionUtils.castUnbound(value, targetType); } @@ -484,8 +603,18 @@ private static NamedExpression generateDefaultExpression(Column column) { /** * get plan for explain. 
*/ - public static Plan getPlanForExplain(ConnectContext ctx, LogicalPlan logicalQuery) { - return InsertUtils.normalizePlan(logicalQuery, InsertUtils.getTargetTable(logicalQuery, ctx), Optional.empty()); + public static Plan getPlanForExplain( + ConnectContext ctx, Optional analyzeContext, LogicalPlan logicalQuery) { + return InsertUtils.normalizePlan( + logicalQuery, InsertUtils.getTargetTable(logicalQuery, ctx), analyzeContext, Optional.empty()); + } + + /** supportFastInsertIntoValues */ + public static boolean supportFastInsertIntoValues( + LogicalPlan logicalPlan, TableIf targetTableIf, ConnectContext ctx) { + return logicalPlan instanceof UnboundTableSink && logicalPlan.child(0) instanceof InlineTable + && targetTableIf instanceof OlapTable + && ctx != null && ctx.getSessionVariable().isEnableFastAnalyzeInsertIntoValues(); } // check for insert into t1(a,b,gen_col) select 1,2,3; @@ -508,7 +637,7 @@ private static void checkGeneratedColumnForInsertIntoSelect(TableIf table, return; } Plan query = unboundLogicalSink.child(); - if (table instanceof OlapTable && !(query instanceof LogicalInlineTable)) { + if (table instanceof OlapTable && !(query instanceof InlineTable)) { OlapTable olapTable = (OlapTable) table; Set insertNames = Sets.newHashSet(); if (unboundLogicalSink.getColNames() != null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java index e7b1f4d581892c..0f3e320edcd4bf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java @@ -30,6 +30,7 @@ import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.analyzer.UnboundTableSink; import 
org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.algebra.OneRowRelation; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; @@ -93,8 +94,11 @@ protected static void analyzeGroupCommit(ConnectContext ctx, TableIf table, Logi conditions.add(Pair.of(() -> !(insertCtx.isPresent() && insertCtx.get() instanceof OlapInsertCommandContext && ((OlapInsertCommandContext) insertCtx.get()).isOverwrite()), () -> "is overwrite command")); conditions.add(Pair.of( - () -> tableSink.child() instanceof OneRowRelation || tableSink.child() instanceof LogicalUnion, - () -> "not one row relation or union, class: " + tableSink.child().getClass().getName())); + () -> tableSink.child() instanceof OneRowRelation + || tableSink.child() instanceof LogicalUnion + || tableSink.child() instanceof InlineTable, + () -> "not one row relation or union or inline table, class: " + + tableSink.child().getClass().getName())); ctx.setGroupCommit(conditions.stream().allMatch(p -> p.first.getAsBoolean())); if (!ctx.isGroupCommit() && LOG.isDebugEnabled()) { for (Pair> pair : conditions) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java new file mode 100644 index 00000000000000..9223e7d5ad66ed --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java @@ -0,0 +1,115 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands.use; + +import org.apache.doris.analysis.StmtType; +import org.apache.doris.catalog.Env; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.DdlException; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.common.ErrorReport; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.commands.Command; +import org.apache.doris.nereids.trees.plans.commands.NoForward; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.StmtExecutor; + +import com.google.common.base.Strings; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Representation of a use db statement. 
+ */ +public class UseCommand extends Command implements NoForward { + private static final Logger LOG = LogManager.getLogger(UseCommand.class); + private String catalogName; + private String databaseName; + + public UseCommand(String databaseName) { + super(PlanType.USE_COMMAND); + this.databaseName = databaseName; + } + + public UseCommand(String catalogName, String databaseName) { + super(PlanType.USE_COMMAND); + this.catalogName = catalogName; + this.databaseName = databaseName; + } + + @Override + public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { + validate(ctx); + handleUseStmt(ctx); + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitUseCommand(this, context); + } + + @Override + public StmtType stmtType() { + return StmtType.USE; + } + + private void validate(ConnectContext context) throws AnalysisException { + if (Strings.isNullOrEmpty(databaseName)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_DB_ERROR); + } + String currentCatalogName = catalogName == null ? ConnectContext.get().getDefaultCatalog() : catalogName; + + if (!Env.getCurrentEnv().getAccessManager() + .checkDbPriv(ConnectContext.get(), currentCatalogName, databaseName, PrivPredicate.SHOW)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, context.getQualifiedUser(), + databaseName); + } + } + + /** + * Process use statement. + */ + private void handleUseStmt(ConnectContext context) { + try { + if (catalogName != null) { + context.getEnv().changeCatalog(context, catalogName); + } + context.getEnv().changeDb(context, databaseName); + } catch (DdlException e) { + LOG.warn("The handling of the use command failed.", e); + context.getState().setError(e.getMysqlErrorCode(), e.getMessage()); + return; + } + context.getState().setOk(); + } + + /** + * Generate sql string. 
+ */ + public String toSql() { + StringBuilder sb = new StringBuilder(); + sb.append("USE "); + if (catalogName != null) { + sb.append("`").append(catalogName).append("`."); + } + sb.append("`").append(databaseName).append("`"); + return sb.toString(); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java index 12ab8b42eaab61..75a2326236fc9b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java @@ -73,6 +73,7 @@ public DistributePlanner(StatementContext statementContext, List f /** plan */ public FragmentIdMapping plan() { + updateProfileIfPresent(SummaryProfile::setQueryPlanFinishTime); try { FragmentIdMapping fragmentJobs = UnassignedJobBuilder.buildJobs(statementContext, idToFragments); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java index b2a2a1d83ca3e7..748bc8fdfa2223 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java @@ -17,15 +17,19 @@ package org.apache.doris.nereids.trees.plans.logical; +import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.LogicalProperties; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Slot; +import org.apache.doris.nereids.trees.expressions.SlotReference; import 
org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.nereids.util.Utils; import com.google.common.collect.ImmutableList; @@ -36,7 +40,7 @@ /** * represent value list such as values(1), (2), (3) will generate LogicalInlineTable((1), (2), (3)). */ -public class LogicalInlineTable extends LogicalLeaf implements BlockFuncDepsPropagation { +public class LogicalInlineTable extends LogicalLeaf implements InlineTable, BlockFuncDepsPropagation { private final List> constantExprsList; @@ -44,11 +48,16 @@ public LogicalInlineTable(List> constantExprsList) { this(constantExprsList, Optional.empty(), Optional.empty()); } + /** LogicalInlineTable */ public LogicalInlineTable(List> constantExprsList, Optional groupExpression, Optional logicalProperties) { super(PlanType.LOGICAL_INLINE_TABLE, groupExpression, logicalProperties); - this.constantExprsList = ImmutableList.copyOf( + + if (constantExprsList.isEmpty()) { + throw new AnalysisException("constantExprsList should not be empty"); + } + this.constantExprsList = Utils.fastToImmutableList( Objects.requireNonNull(constantExprsList, "constantExprsList should not be null")); } @@ -63,23 +72,49 @@ public R accept(PlanVisitor visitor, C context) { @Override public List getExpressions() { - return constantExprsList.stream().flatMap(List::stream).collect(ImmutableList.toImmutableList()); + ImmutableList.Builder expressions = ImmutableList.builderWithExpectedSize( + constantExprsList.size() * constantExprsList.get(0).size()); + + for (List namedExpressions : constantExprsList) { + expressions.addAll(namedExpressions); + } + + return expressions.build(); } @Override public Plan withGroupExpression(Optional groupExpression) { - return null; + return new LogicalInlineTable( 
+ constantExprsList, groupExpression, Optional.of(getLogicalProperties()) + ); } @Override public Plan withGroupExprLogicalPropChildren(Optional groupExpression, Optional logicalProperties, List children) { - return null; + if (!children.isEmpty()) { + throw new AnalysisException("children should be empty"); + } + return new LogicalInlineTable(constantExprsList, groupExpression, logicalProperties); } @Override public List computeOutput() { - return ImmutableList.of(); + int columnNum = constantExprsList.get(0).size(); + List firstRow = constantExprsList.get(0); + ImmutableList.Builder output = ImmutableList.builderWithExpectedSize(constantExprsList.size()); + for (int i = 0; i < columnNum; i++) { + NamedExpression firstRowColumn = firstRow.get(i); + boolean nullable = false; + for (List row : constantExprsList) { + if (row.get(i).nullable()) { + nullable = true; + break; + } + } + output.add(new SlotReference(firstRowColumn.getName(), firstRowColumn.getDataType(), nullable)); + } + return output.build(); } @Override @@ -98,4 +133,11 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(constantExprsList); } + + @Override + public String toString() { + return Utils.toSqlString("LogicalInlineTable[" + id.asInt() + "]", + "rowNum", constantExprsList.size(), + "constantExprsList", constantExprsList); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java index 7023815c7c5b99..9fa14458ed38b9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java @@ -54,7 +54,7 @@ public LogicalOneRowRelation(RelationId relationId, List projec private LogicalOneRowRelation(RelationId relationId, List projects, Optional groupExpression, Optional 
logicalProperties) { super(relationId, PlanType.LOGICAL_ONE_ROW_RELATION, groupExpression, logicalProperties); - this.projects = ImmutableList.copyOf(Objects.requireNonNull(projects, "projects can not be null")); + this.projects = Utils.fastToImmutableList(Objects.requireNonNull(projects, "projects can not be null")); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java index 2e4ddb55ff2f02..e13ec2864b3cd8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java @@ -216,7 +216,8 @@ public int getArity() { return children.size(); } - private DataType getAssignmentCompatibleType(DataType left, DataType right) { + /** getAssignmentCompatibleType */ + public static DataType getAssignmentCompatibleType(DataType left, DataType right) { if (left.isNullType()) { return right; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java index 459044100b632d..d9fae844c48912 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java @@ -17,6 +17,7 @@ package org.apache.doris.nereids.trees.plans.logical; +import org.apache.doris.common.Pair; import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.DataTrait; import org.apache.doris.nereids.properties.LogicalProperties; @@ -28,11 +29,14 @@ import org.apache.doris.nereids.trees.plans.PlanType; import org.apache.doris.nereids.trees.plans.algebra.Union; import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; 
+import org.apache.doris.nereids.types.DataType; +import org.apache.doris.nereids.util.TypeCoercionUtils; import org.apache.doris.nereids.util.Utils; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.BitSet; @@ -205,6 +209,14 @@ public void computeUniform(DataTrait.Builder builder) { // don't propagate uniform slots } + @Override + public boolean hasUnboundExpression() { + if (!constantExprsList.isEmpty() && children.isEmpty()) { + return false; + } + return super.hasUnboundExpression(); + } + private List mapSlotToIndex(Plan plan, List> equalSlotsList) { Map slotToIndex = new HashMap<>(); for (int i = 0; i < plan.getOutput().size(); i++) { @@ -280,4 +292,73 @@ public void computeEqualSet(DataTrait.Builder builder) { public void computeFd(DataTrait.Builder builder) { // don't generate } + + /** castCommonDataTypeAndNullableByConstants */ + public static Pair>, List> castCommonDataTypeAndNullableByConstants( + List> constantExprsList) { + int columnCount = constantExprsList.isEmpty() ? 
0 : constantExprsList.get(0).size(); + Pair, List> commonInfo + = computeCommonDataTypeAndNullable(constantExprsList, columnCount); + List> castedRows = castToCommonType(constantExprsList, commonInfo.key(), columnCount); + List nullables = commonInfo.second; + return Pair.of(castedRows, nullables); + } + + private static Pair, List> computeCommonDataTypeAndNullable( + List> constantExprsList, int columnCount) { + List nullables = Lists.newArrayListWithCapacity(columnCount); + List commonDataTypes = Lists.newArrayListWithCapacity(columnCount); + List firstRow = constantExprsList.get(0); + for (int columnId = 0; columnId < columnCount; columnId++) { + Expression constant = firstRow.get(columnId).child(0); + Pair commonDataTypeAndNullable + = computeCommonDataTypeAndNullable(constant, columnId, constantExprsList); + commonDataTypes.add(commonDataTypeAndNullable.first); + nullables.add(commonDataTypeAndNullable.second); + } + return Pair.of(commonDataTypes, nullables); + } + + private static Pair computeCommonDataTypeAndNullable( + Expression firstRowExpr, int columnId, List> constantExprsList) { + DataType commonDataType = firstRowExpr.getDataType(); + boolean nullable = firstRowExpr.nullable(); + for (int rowId = 1; rowId < constantExprsList.size(); rowId++) { + NamedExpression namedExpression = constantExprsList.get(rowId).get(columnId); + Expression otherConstant = namedExpression.child(0); + nullable |= otherConstant.nullable(); + DataType otherDataType = otherConstant.getDataType(); + commonDataType = getAssignmentCompatibleType(commonDataType, otherDataType); + } + return Pair.of(commonDataType, nullable); + } + + private static List> castToCommonType( + List> rows, List commonDataTypes, int columnCount) { + ImmutableList.Builder> castedConstants + = ImmutableList.builderWithExpectedSize(rows.size()); + for (List row : rows) { + castedConstants.add(castToCommonType(row, commonDataTypes)); + } + return castedConstants.build(); + } + + private static List 
castToCommonType(List row, List commonTypes) { + ImmutableList.Builder castedRow = ImmutableList.builderWithExpectedSize(row.size()); + boolean changed = false; + for (int columnId = 0; columnId < row.size(); columnId++) { + NamedExpression constantAlias = row.get(columnId); + Expression constant = constantAlias.child(0); + DataType commonType = commonTypes.get(columnId); + if (commonType.equals(constant.getDataType())) { + castedRow.add(constantAlias); + } else { + changed = true; + Expression expression + = TypeCoercionUtils.castIfNotSameTypeStrict(constant, commonType); + castedRow.add((NamedExpression) constantAlias.withChildren(expression)); + } + } + return changed ? castedRow.build() : row; + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java index efe0a03e3708b4..90e92ca1ae2597 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java @@ -178,7 +178,7 @@ public int hashCode() { @Override public String toString() { - return Utils.toSqlString("LogicalOlapTableSink[" + id.asInt() + "]", + return Utils.toSqlString("PhysicalOlapTableSink[" + id.asInt() + "]", "outputExprs", outputExprs, "database", database.getFullName(), "targetTable", targetTable.getName(), diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index d3749e94d57d0f..122e513a08cb57 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -151,6 +151,7 @@ import 
org.apache.doris.nereids.trees.plans.commands.refresh.RefreshDatabaseCommand; import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshTableCommand; import org.apache.doris.nereids.trees.plans.commands.use.SwitchCommand; +import org.apache.doris.nereids.trees.plans.commands.use.UseCommand; /** CommandVisitor. */ public interface CommandVisitor { @@ -697,4 +698,8 @@ default R visitShowQueryProfileCommand(ShowQueryProfileCommand showQueryProfileC default R visitSwitchCommand(SwitchCommand switchCommand, C context) { return visitCommand(switchCommand, context); } + + default R visitUseCommand(UseCommand useCommand, C context) { + return visitCommand(useCommand, context); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java index 396c6e4f26569f..f7642ce572c1a3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java @@ -17,6 +17,7 @@ package org.apache.doris.nereids.trees.plans.visitor; +import org.apache.doris.nereids.analyzer.UnboundInlineTable; import org.apache.doris.nereids.trees.plans.GroupPlan; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.commands.Command; @@ -99,6 +100,7 @@ public R visitCommand(Command command, C context) { return visit(command, context); } + // ******************************* // relations // ******************************* @@ -130,6 +132,10 @@ public R visitPhysicalSink(PhysicalSink physicalSink, C context) // ******************************* // Logical plans // ******************************* + public R visitUnboundInlineTable(UnboundInlineTable unboundInlineTable, C context) { + return visit(unboundInlineTable, context); + } + public R visitLogicalSqlCache(LogicalSqlCache sqlCache, C context) { return 
visit(sqlCache, context); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java index 1da4353d20da33..1fb108b79fd6a6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java @@ -601,8 +601,7 @@ public static Optional characterLiteralTypeCoercion(String value, Da } else if (dataType.isDateTimeType() && DateTimeChecker.isValidDateTime(value)) { ret = DateTimeLiteral.parseDateTimeLiteral(value, false).orElse(null); } else if (dataType.isDateV2Type() && DateTimeChecker.isValidDateTime(value)) { - Result parseResult - = DateV2Literal.parseDateLiteral(value); + Result parseResult = DateV2Literal.parseDateLiteral(value); if (parseResult.isOk()) { ret = parseResult.get(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java index 42b99f6effdb84..c111839fc5093e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java @@ -58,8 +58,16 @@ public class Utils { */ public static String quoteIfNeeded(String part) { // We quote strings except the ones which consist of digits only. - return part.matches("\\w*[\\w&&[^\\d]]+\\w*") - ? 
part : part.replace("`", "``"); + StringBuilder quote = new StringBuilder(part.length()); + for (int i = 0; i < part.length(); i++) { + char c = part.charAt(i); + if (c == '`') { + quote.append("``"); + } else { + quote.append(c); + } + } + return quote.toString(); } /** diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java index f29c617c15861e..446960f9d56415 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java @@ -37,9 +37,9 @@ import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.glue.LogicalPlanAdapter; import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.commands.NeedAuditEncryption; import org.apache.doris.nereids.trees.plans.commands.insert.InsertIntoTableCommand; -import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; import org.apache.doris.plugin.AuditEvent.AuditEventBuilder; @@ -162,8 +162,8 @@ private static int countValues(List children) { for (Plan child : children) { if (child instanceof UnboundOneRowRelation) { cnt++; - } else if (child instanceof LogicalInlineTable) { - cnt += ((LogicalInlineTable) child).getConstantExprsList().size(); + } else if (child instanceof InlineTable) { + cnt += ((InlineTable) child).getConstantExprsList().size(); } else if (child instanceof LogicalUnion) { cnt += countValues(child.children()); } @@ -216,40 +216,42 @@ private static void logAuditLogImpl(ConnectContext ctx, String origStmt, Stateme .setCommandType(ctx.getCommand().toString()); if (ctx.getState().isQuery()) { - if (!ctx.getSessionVariable().internalSession) { - 
MetricRepo.COUNTER_QUERY_ALL.increase(1L); - MetricRepo.USER_COUNTER_QUERY_ALL.getOrAdd(ctx.getQualifiedUser()).increase(1L); - } - try { - if (Config.isCloudMode()) { - cloudCluster = ctx.getCloudCluster(false); - } - } catch (ComputeGroupException e) { - LOG.warn("Failed to get cloud cluster", e); - return; - } - MetricRepo.increaseClusterQueryAll(cloudCluster); - if (ctx.getState().getStateType() == MysqlStateType.ERR - && ctx.getState().getErrType() != QueryState.ErrType.ANALYSIS_ERR) { - // err query + if (MetricRepo.isInit) { if (!ctx.getSessionVariable().internalSession) { - MetricRepo.COUNTER_QUERY_ERR.increase(1L); - MetricRepo.USER_COUNTER_QUERY_ERR.getOrAdd(ctx.getQualifiedUser()).increase(1L); - MetricRepo.increaseClusterQueryErr(cloudCluster); + MetricRepo.COUNTER_QUERY_ALL.increase(1L); + MetricRepo.USER_COUNTER_QUERY_ALL.getOrAdd(ctx.getQualifiedUser()).increase(1L); } - } else if (ctx.getState().getStateType() == MysqlStateType.OK - || ctx.getState().getStateType() == MysqlStateType.EOF) { - // ok query - if (!ctx.getSessionVariable().internalSession) { - MetricRepo.HISTO_QUERY_LATENCY.update(elapseMs); - MetricRepo.USER_HISTO_QUERY_LATENCY.getOrAdd(ctx.getQualifiedUser()).update(elapseMs); - MetricRepo.updateClusterQueryLatency(cloudCluster, elapseMs); + try { + if (Config.isCloudMode()) { + cloudCluster = ctx.getCloudCluster(false); + } + } catch (ComputeGroupException e) { + LOG.warn("Failed to get cloud cluster", e); + return; } + MetricRepo.increaseClusterQueryAll(cloudCluster); + if (ctx.getState().getStateType() == MysqlStateType.ERR + && ctx.getState().getErrType() != QueryState.ErrType.ANALYSIS_ERR) { + // err query + if (!ctx.getSessionVariable().internalSession) { + MetricRepo.COUNTER_QUERY_ERR.increase(1L); + MetricRepo.USER_COUNTER_QUERY_ERR.getOrAdd(ctx.getQualifiedUser()).increase(1L); + MetricRepo.increaseClusterQueryErr(cloudCluster); + } + } else if (ctx.getState().getStateType() == MysqlStateType.OK + || 
ctx.getState().getStateType() == MysqlStateType.EOF) { + // ok query + if (!ctx.getSessionVariable().internalSession) { + MetricRepo.HISTO_QUERY_LATENCY.update(elapseMs); + MetricRepo.USER_HISTO_QUERY_LATENCY.getOrAdd(ctx.getQualifiedUser()).update(elapseMs); + MetricRepo.updateClusterQueryLatency(cloudCluster, elapseMs); + } - if (elapseMs > Config.qe_slow_log_ms) { - String sqlDigest = DigestUtils.md5Hex(((Queriable) parsedStmt).toDigest()); - auditEventBuilder.setSqlDigest(sqlDigest); - MetricRepo.COUNTER_QUERY_SLOW.increase(1L); + if (elapseMs > Config.qe_slow_log_ms) { + String sqlDigest = DigestUtils.md5Hex(((Queriable) parsedStmt).toDigest()); + auditEventBuilder.setSqlDigest(sqlDigest); + MetricRepo.COUNTER_QUERY_SLOW.increase(1L); + } } } auditEventBuilder.setIsQuery(true) diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index e0702d0015b022..3354b41ca1603b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -366,6 +366,8 @@ public class SessionVariable implements Serializable, Writable { public static final String ENABLE_SINGLE_REPLICA_INSERT = "enable_single_replica_insert"; + public static final String ENABLE_FAST_ANALYZE_INSERT_INTO_VALUES = "enable_fast_analyze_into_values"; + public static final String ENABLE_FUNCTION_PUSHDOWN = "enable_function_pushdown"; public static final String ENABLE_EXT_FUNC_PRED_PUSHDOWN = "enable_ext_func_pred_pushdown"; @@ -681,6 +683,8 @@ public class SessionVariable implements Serializable, Writable { public static final String ENABLE_MATCH_WITHOUT_INVERTED_INDEX = "enable_match_without_inverted_index"; public static final String ENABLE_FALLBACK_ON_MISSING_INVERTED_INDEX = "enable_fallback_on_missing_inverted_index"; + public static final String ENABLE_INVERTED_INDEX_SEARCHER_CACHE = "enable_inverted_index_searcher_cache"; + 
public static final String ENABLE_INVERTED_INDEX_QUERY_CACHE = "enable_inverted_index_query_cache"; public static final String IN_LIST_VALUE_COUNT_THRESHOLD = "in_list_value_count_threshold"; @@ -1497,6 +1501,15 @@ public void setEnableLeftZigZag(boolean enableLeftZigZag) { needForward = true, varType = VariableAnnotation.EXPERIMENTAL) public boolean enableSingleReplicaInsert = false; + @VariableMgr.VarAttr( + name = ENABLE_FAST_ANALYZE_INSERT_INTO_VALUES, fuzzy = true, + description = { + "跳过大部分的优化规则,快速分析insert into values语句", + "Skip most optimization rules and quickly analyze insert into values statements" + } + ) + private boolean enableFastAnalyzeInsertIntoValues = true; + @VariableMgr.VarAttr(name = ENABLE_FUNCTION_PUSHDOWN, fuzzy = true) public boolean enableFunctionPushdown = false; @@ -2313,6 +2326,18 @@ public void setIgnoreShapePlanNodes(String ignoreShapePlanNodes) { }) public boolean enableFallbackOnMissingInvertedIndex = true; + @VariableMgr.VarAttr(name = ENABLE_INVERTED_INDEX_SEARCHER_CACHE, description = { + "开启后会缓存倒排索引searcher", + "Enabling this will cache the inverted index searcher." + }) + public boolean enableInvertedIndexSearcherCache = true; + + @VariableMgr.VarAttr(name = ENABLE_INVERTED_INDEX_QUERY_CACHE, description = { + "开启后会缓存倒排索引查询结果", + "Enabling this will cache the results of inverted index queries." 
+ }) + public boolean enableInvertedIndexQueryCache = true; + @VariableMgr.VarAttr(name = IN_LIST_VALUE_COUNT_THRESHOLD, description = { "in条件value数量大于这个threshold后将不会走fast_execute", "When the number of values in the IN condition exceeds this threshold," @@ -3651,8 +3676,6 @@ public boolean isEnableExprTrace() { return enableExprTrace; } - - public boolean isEnableSingleReplicaInsert() { return enableSingleReplicaInsert; } @@ -3661,6 +3684,14 @@ public void setEnableSingleReplicaInsert(boolean enableSingleReplicaInsert) { this.enableSingleReplicaInsert = enableSingleReplicaInsert; } + public boolean isEnableFastAnalyzeInsertIntoValues() { + return enableFastAnalyzeInsertIntoValues; + } + + public void setEnableFastAnalyzeInsertIntoValues(boolean enableFastAnalyzeInsertIntoValues) { + this.enableFastAnalyzeInsertIntoValues = enableFastAnalyzeInsertIntoValues; + } + public boolean isEnableMemtableOnSinkNode() { return enableMemtableOnSinkNode; } @@ -4002,6 +4033,8 @@ public TQueryOptions toThrift() { tResult.setEnableMatchWithoutInvertedIndex(enableMatchWithoutInvertedIndex); tResult.setEnableFallbackOnMissingInvertedIndex(enableFallbackOnMissingInvertedIndex); + tResult.setEnableInvertedIndexSearcherCache(enableInvertedIndexSearcherCache); + tResult.setEnableInvertedIndexQueryCache(enableInvertedIndexQueryCache); tResult.setHiveOrcUseColumnNames(hiveOrcUseColumnNames); tResult.setHiveParquetUseColumnNames(hiveParquetUseColumnNames); tResult.setKeepCarriageReturn(keepCarriageReturn); diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java index 692d723ed0a9f5..8e29fe25d72574 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java +++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java @@ -42,7 +42,7 @@ protected Optional doLoad(StatisticsCacheKey key) { 
columnStatistic = table.getColumnStatistic(key.colName); } } catch (Throwable t) { - LOG.warn("Failed to load stats for column [Catalog:{}, DB:{}, Table:{}, Column:{}], Reason: {}", + LOG.info("Failed to load stats for column [Catalog:{}, DB:{}, Table:{}, Column:{}], Reason: {}", key.catalogId, key.dbId, key.tableId, key.colName, t.getMessage()); if (LOG.isDebugEnabled()) { LOG.debug(t); diff --git a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java index 04ea7d01eae3dd..817bfefafdfbc3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java @@ -18,9 +18,13 @@ package org.apache.doris.tablefunction; import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; import org.apache.doris.catalog.PrimitiveType; import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.qe.ConnectContext; import org.apache.doris.thrift.TBackendsMetadataParams; import org.apache.doris.thrift.TMetaScanRange; import org.apache.doris.thrift.TMetadataType; @@ -83,6 +87,12 @@ public BackendsTableValuedFunction(Map params) throws AnalysisEx if (params.size() != 0) { throw new AnalysisException("backends table-valued-function does not support any params"); } + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN_OR_NODE)) { + String message = ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR.formatErrorMsg( + PrivPredicate.ADMIN_OR_NODE.getPrivs().toString()); + throw new AnalysisException(message); + } } @Override diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java index cc7ff82b8fb0e1..2c898a57afe96d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java @@ -18,8 +18,12 @@ package org.apache.doris.tablefunction; import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.qe.ConnectContext; import org.apache.doris.thrift.TFrontendsMetadataParams; import org.apache.doris.thrift.TMetaScanRange; import org.apache.doris.thrift.TMetadataType; @@ -67,6 +71,12 @@ public FrontendsDisksTableValuedFunction(Map params) throws Anal if (params.size() != 0) { throw new AnalysisException("frontends_disks table-valued-function does not support any params"); } + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN_OR_NODE)) { + String message = ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR.formatErrorMsg( + PrivPredicate.ADMIN_OR_NODE.getPrivs().toString()); + throw new AnalysisException(message); + } } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java index aded1076a83d03..a9f48b6d1ff3d1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java @@ -18,8 +18,12 @@ package org.apache.doris.tablefunction; import 
org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.qe.ConnectContext; import org.apache.doris.thrift.TFrontendsMetadataParams; import org.apache.doris.thrift.TMetaScanRange; import org.apache.doris.thrift.TMetadataType; @@ -76,6 +80,12 @@ public FrontendsTableValuedFunction(Map params) throws AnalysisE if (params.size() != 0) { throw new AnalysisException("frontends table-valued-function does not support any params"); } + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN_OR_NODE)) { + String message = ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR.formatErrorMsg( + PrivPredicate.ADMIN_OR_NODE.getPrivs().toString()); + throw new AnalysisException(message); + } } @Override diff --git a/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java b/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java index b22925e5d89270..320bff45229fba 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java @@ -40,12 +40,14 @@ import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import com.google.common.collect.MinMaxPriorityQueue; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; public class TabletHealthTest extends TestWithFeService { @@ -78,6 +80,8 @@ protected void runBeforeAll() throws Exception { @Override protected void runBeforeEach() throws Exception { + // set back to default value + Config.max_scheduling_tablets = 2000; 
for (Table table : db.getTables()) { dropTable(table.getName(), true); } @@ -358,4 +362,52 @@ public void testColocateTabletHealth() throws Exception { dropTable(table.getName(), true); } + + @Test + public void testAddTabletNoDeadLock() throws Exception { + Config.max_scheduling_tablets = 1; + createTable("CREATE TABLE tbl3 (k INT) DISTRIBUTED BY HASH(k) BUCKETS 2" + + " PROPERTIES ('replication_num' = '3')"); + DebugPointUtil.addDebugPoint("MockedBackendFactory.handleCloneTablet.failed"); + OlapTable table = (OlapTable) db.getTableOrMetaException("tbl3"); + Partition partition = table.getPartitions().iterator().next(); + List tablets = partition.getMaterializedIndices(IndexExtState.ALL).iterator().next().getTablets(); + Assertions.assertEquals(2, tablets.size()); + + partition.updateVisibleVersion(10L); + tablets.forEach(tablet -> tablet.getReplicas().forEach(replica -> replica.updateVersion(10))); + + Tablet tabletA = tablets.get(0); + Tablet tabletB = tablets.get(1); + TabletScheduler scheduler = Env.getCurrentEnv().getTabletScheduler(); + tabletA.getReplicas().get(0).adminUpdateVersionInfo(8L, null, null, 0L); + checkTabletStatus(tabletA, TabletStatus.VERSION_INCOMPLETE, table, partition); + Env.getCurrentEnv().getTabletChecker().runAfterCatalogReady(); + Env.getCurrentEnv().getTabletScheduler().runAfterCatalogReady(); + Thread.sleep(1000); + MinMaxPriorityQueue queue = scheduler.getPendingTabletQueue(); + TabletSchedCtx tabletACtx = queue.peekFirst(); + Assertions.assertNotNull(tabletACtx); + tabletACtx.setLastVisitedTime(System.currentTimeMillis() + 3600 * 1000L); + tabletB.getReplicas().get(0).adminUpdateVersionInfo(8L, null, null, 0L); + checkTabletStatus(tabletB, TabletStatus.VERSION_INCOMPLETE, table, partition); + Thread thread = new Thread(() -> { + try { + Env.getCurrentEnv().getTabletChecker().runAfterCatalogReady(); + Env.getCurrentEnv().getTabletScheduler().runAfterCatalogReady(); + } catch (Exception e) { + e.printStackTrace(); + } + }); + 
thread.start(); + Thread.sleep(1000); + Assertions.assertTrue(table.tryWriteLock(2, TimeUnit.SECONDS)); + table.writeUnlock(); + DebugPointUtil.clearDebugPoints(); + doRepair(); + Thread.sleep(1000); + doRepair(); + checkTabletIsHealth(tabletA, table, partition); + checkTabletIsHealth(tabletB, table, partition); + } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java index 43348ca8a0e6ef..f8e72c366b55f7 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java @@ -22,9 +22,10 @@ import org.apache.doris.catalog.Env; import org.apache.doris.catalog.PrimitiveType; import org.apache.doris.common.FeConstants; +import org.apache.doris.common.FeMetaVersion; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.test.TestExternalCatalog; -import org.apache.doris.mysql.privilege.Auth; +import org.apache.doris.meta.MetaContext; import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.QueryState.MysqlStateType; import org.apache.doris.qe.StmtExecutor; @@ -32,16 +33,20 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import org.apache.hadoop.conf.Configuration; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.nio.file.Files; import java.util.HashMap; import java.util.List; import java.util.Map; public class ExternalCatalogTest extends TestWithFeService { - private static Auth auth; - private static Env env; + private Env env; private CatalogMgr mgr; private ConnectContext rootCtx; @@ -51,7 +56,6 @@ protected void runBeforeAll() throws Exception { mgr = Env.getCurrentEnv().getCatalogMgr(); rootCtx = createDefaultCtx(); env = 
Env.getCurrentEnv(); - auth = env.getAuth(); // 1. create test catalog CreateCatalogStmt testCatalog = (CreateCatalogStmt) parseAndAnalyzeStmt( "create catalog test1 properties(\n" @@ -244,4 +248,32 @@ public Map>> getMetadata() { return MOCKED_META; } } + + @Test + public void testSerialization() throws Exception { + MetaContext metaContext = new MetaContext(); + metaContext.setMetaVersion(FeMetaVersion.VERSION_CURRENT); + metaContext.setThreadLocalInfo(); + + // 1. Write objects to file + File file = new File("./external_catalog_persist_test.dat"); + file.createNewFile(); + DataOutputStream dos = new DataOutputStream(Files.newOutputStream(file.toPath())); + + TestExternalCatalog ctl = (TestExternalCatalog) mgr.getCatalog("test1"); + ctl.write(dos); + dos.flush(); + dos.close(); + + // 2. Read objects from file + DataInputStream dis = new DataInputStream(Files.newInputStream(file.toPath())); + + TestExternalCatalog ctl2 = (TestExternalCatalog) ExternalCatalog.read(dis); + Configuration conf = ctl2.getConfiguration(); + Assertions.assertNotNull(conf); + + // 3. 
delete files + dis.close(); + file.delete(); + } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java index 80d0a7c2429df3..3ba4804e52279c 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java @@ -28,17 +28,21 @@ import com.google.common.collect.Maps; import com.google.common.collect.Range; import mockit.Expectations; +import mockit.Mock; +import mockit.MockUp; import mockit.Mocked; import mockit.Verifications; import org.apache.iceberg.PartitionField; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; +import org.apache.iceberg.Table; import org.apache.iceberg.transforms.Days; import org.apache.iceberg.transforms.Hours; import org.apache.iceberg.transforms.Months; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -52,6 +56,16 @@ public void testIsSupportedPartitionTable(@Mocked org.apache.iceberg.Table icebe @Mocked Schema schema) { IcebergExternalTable table = new IcebergExternalTable(1, "1", "2", null); Map specs = Maps.newHashMap(); + new MockUp() { + @Mock + private void makeSureInitialized() { + } + + @Mock + public Table getIcebergTable() { + return icebergTable; + } + }; // Test null specs.put(0, null); new Expectations() {{ @@ -139,34 +153,35 @@ public void testGetPartitionRange() throws AnalysisException { table.setPartitionColumns(partitionColumns); // Test null partition value - Range nullRange = table.getPartitionRange(null, "hour"); - Assertions.assertFalse(nullRange.hasLowerBound()); - Assertions.assertEquals("0000-01-02 00:00:00", + Range nullRange = table.getPartitionRange(null, "hour", partitionColumns); + 
Assertions.assertEquals("0000-01-01 00:00:00", + nullRange.lowerEndpoint().getPartitionValuesAsStringList().get(0)); + Assertions.assertEquals("0000-01-01 00:00:01", nullRange.upperEndpoint().getPartitionValuesAsStringList().get(0)); // Test hour transform. - Range hour = table.getPartitionRange("100", "hour"); + Range hour = table.getPartitionRange("100", "hour", partitionColumns); PartitionKey lowKey = hour.lowerEndpoint(); PartitionKey upKey = hour.upperEndpoint(); Assertions.assertEquals("1970-01-05 04:00:00", lowKey.getPartitionValuesAsStringList().get(0)); Assertions.assertEquals("1970-01-05 05:00:00", upKey.getPartitionValuesAsStringList().get(0)); // Test day transform. - Range day = table.getPartitionRange("100", "day"); + Range day = table.getPartitionRange("100", "day", partitionColumns); lowKey = day.lowerEndpoint(); upKey = day.upperEndpoint(); Assertions.assertEquals("1970-04-11 00:00:00", lowKey.getPartitionValuesAsStringList().get(0)); Assertions.assertEquals("1970-04-12 00:00:00", upKey.getPartitionValuesAsStringList().get(0)); // Test month transform. - Range month = table.getPartitionRange("100", "month"); + Range month = table.getPartitionRange("100", "month", partitionColumns); lowKey = month.lowerEndpoint(); upKey = month.upperEndpoint(); Assertions.assertEquals("1978-05-01 00:00:00", lowKey.getPartitionValuesAsStringList().get(0)); Assertions.assertEquals("1978-06-01 00:00:00", upKey.getPartitionValuesAsStringList().get(0)); // Test year transform. 
- Range year = table.getPartitionRange("100", "year"); + Range year = table.getPartitionRange("100", "year", partitionColumns); lowKey = year.lowerEndpoint(); upKey = year.upperEndpoint(); Assertions.assertEquals("2070-01-01 00:00:00", lowKey.getPartitionValuesAsStringList().get(0)); @@ -174,7 +189,7 @@ public void testGetPartitionRange() throws AnalysisException { // Test unsupported transform Exception exception = Assertions.assertThrows(RuntimeException.class, () -> { - table.getPartitionRange("100", "bucket"); + table.getPartitionRange("100", "bucket", partitionColumns); }); Assertions.assertEquals("Unsupported transform bucket", exception.getMessage()); } @@ -183,15 +198,16 @@ public void testGetPartitionRange() throws AnalysisException { public void testSortRange() throws AnalysisException { IcebergExternalTable table = new IcebergExternalTable(1, "1", "2", null); Column c = new Column("c", PrimitiveType.DATETIMEV2); + ArrayList columns = Lists.newArrayList(c); table.setPartitionColumns(Lists.newArrayList(c)); - PartitionItem nullRange = new RangePartitionItem(table.getPartitionRange(null, "hour")); - PartitionItem year1970 = new RangePartitionItem(table.getPartitionRange("0", "year")); - PartitionItem year1971 = new RangePartitionItem(table.getPartitionRange("1", "year")); - PartitionItem month197002 = new RangePartitionItem(table.getPartitionRange("1", "month")); - PartitionItem month197103 = new RangePartitionItem(table.getPartitionRange("14", "month")); - PartitionItem month197204 = new RangePartitionItem(table.getPartitionRange("27", "month")); - PartitionItem day19700202 = new RangePartitionItem(table.getPartitionRange("32", "day")); - PartitionItem day19730101 = new RangePartitionItem(table.getPartitionRange("1096", "day")); + PartitionItem nullRange = new RangePartitionItem(table.getPartitionRange(null, "hour", columns)); + PartitionItem year1970 = new RangePartitionItem(table.getPartitionRange("0", "year", columns)); + PartitionItem year1971 = new 
RangePartitionItem(table.getPartitionRange("1", "year", columns)); + PartitionItem month197002 = new RangePartitionItem(table.getPartitionRange("1", "month", columns)); + PartitionItem month197103 = new RangePartitionItem(table.getPartitionRange("14", "month", columns)); + PartitionItem month197204 = new RangePartitionItem(table.getPartitionRange("27", "month", columns)); + PartitionItem day19700202 = new RangePartitionItem(table.getPartitionRange("32", "day", columns)); + PartitionItem day19730101 = new RangePartitionItem(table.getPartitionRange("1096", "day", columns)); Map map = Maps.newHashMap(); map.put("nullRange", nullRange); map.put("year1970", year1970); diff --git a/fe/fe-core/src/test/java/org/apache/doris/httpv2/CopyIntoTest.java b/fe/fe-core/src/test/java/org/apache/doris/httpv2/rest/CopyIntoTest.java similarity index 100% rename from fe/fe-core/src/test/java/org/apache/doris/httpv2/CopyIntoTest.java rename to fe/fe-core/src/test/java/org/apache/doris/httpv2/rest/CopyIntoTest.java diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java index 9a46b810586eec..3ce7e64560ce1b 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java @@ -448,7 +448,7 @@ public void testParseStmtType() { sql = "use a"; plan = nereidsParser.parseSingle(sql); - Assertions.assertEquals(plan.stmtType(), StmtType.OTHER); + Assertions.assertEquals(plan.stmtType(), StmtType.USE); sql = "CREATE TABLE tbl (`id` INT NOT NULL) DISTRIBUTED BY HASH(`id`) BUCKETS 1"; plan = nereidsParser.parseSingle(sql); @@ -463,10 +463,12 @@ public void testParseStmtType() { public void testParseUse() { NereidsParser nereidsParser = new NereidsParser(); String sql = "use db"; - nereidsParser.parseSingle(sql); + LogicalPlan logicalPlan = nereidsParser.parseSingle(sql); 
+ Assertions.assertEquals(logicalPlan.stmtType(), StmtType.USE); sql = "use catalog.db"; - nereidsParser.parseSingle(sql); + LogicalPlan logicalPlan1 = nereidsParser.parseSingle(sql); + Assertions.assertEquals(logicalPlan1.stmtType(), StmtType.USE); } @Test diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java index 4d932187611136..32857d4f4ae8fa 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java @@ -21,70 +21,171 @@ import org.apache.doris.nereids.rules.expression.ExpressionRewriteTestHelper; import org.apache.doris.nereids.rules.expression.ExpressionRuleExecutor; import org.apache.doris.nereids.trees.expressions.Expression; -import org.apache.doris.nereids.trees.expressions.Slot; -import org.apache.doris.nereids.trees.expressions.SlotReference; -import org.apache.doris.nereids.types.IntegerType; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.util.HashMap; -import java.util.Map; - class SimplifyArithmeticComparisonRuleTest extends ExpressionRewriteTestHelper { @Test - public void testProcess() { - Map nameToSlot = new HashMap<>(); - nameToSlot.put("a", new SlotReference("a", IntegerType.INSTANCE)); + public void testNumeric() { executor = new ExpressionRuleExecutor(ImmutableList.of( ExpressionRewrite.bottomUp(SimplifyArithmeticComparisonRule.INSTANCE) )); - assertRewriteAfterSimplify("a + 1 > 1", "a > cast((1 - 1) as INT)", nameToSlot); - assertRewriteAfterSimplify("a - 1 > 1", "a > cast((1 + 1) as INT)", nameToSlot); - assertRewriteAfterSimplify("a / -2 
> 1", "cast((1 * -2) as INT) > a", nameToSlot); + + // test tinyint type + assertRewriteAfterSimplify("TA + 2 > 1", "cast(TA as SMALLINT) > (1 - 2)"); + assertRewriteAfterSimplify("TA - 2 > 1", "cast(TA as SMALLINT) > (1 + 2)"); + assertRewriteAfterSimplify("1 + TA > 2", "cast(TA as SMALLINT) > (2 - 1)"); + assertRewriteAfterSimplify("-1 + TA > 2", "cast(TA as SMALLINT) > (2 - (-1))"); + assertRewriteAfterSimplify("1 - TA > 2", "cast(TA as SMALLINT) < (1 - 2))"); + assertRewriteAfterSimplify("-1 - TA > 2", "cast(TA as SMALLINT) < ((-1) - 2)"); + assertRewriteAfterSimplify("2 * TA > 1", "((2 * TA) > 1)"); + assertRewriteAfterSimplify("-2 * TA > 1", "((-2 * TA) > 1)"); + assertRewriteAfterSimplify("2 / TA > 1", "((2 / TA) > 1)"); + assertRewriteAfterSimplify("-2 / TA > 1", "((-2 / TA) > 1)"); + assertRewriteAfterSimplify("TA * 2 > 1", "((TA * 2) > 1)"); + assertRewriteAfterSimplify("TA * (-2) > 1", "((TA * (-2)) > 1)"); + assertRewriteAfterSimplify("TA / 2 > 1", "cast(TA as SMALLINT) > (1 * 2)"); + assertRewriteAfterSimplify("TA / -2 > 1", "(1 * -2) > cast(TA as SMALLINT)"); + + // test integer type + assertRewriteAfterSimplify("IA + 2 > 1", "IA > cast((1 - 2) as INT)"); + assertRewriteAfterSimplify("IA - 2 > 1", "IA > cast((1 + 2) as INT)"); + assertRewriteAfterSimplify("1 + IA > 2", "IA > cast((2 - 1) as INT)"); + assertRewriteAfterSimplify("-1 + IA > 2", "IA > cast((2 - (-1)) as INT)"); + assertRewriteAfterSimplify("1 - IA > 2", "IA < cast((1 - 2) as INT)"); + assertRewriteAfterSimplify("-1 - IA > 2", "IA < cast(((-1) - 2) as INT)"); + assertRewriteAfterSimplify("2 * IA > 1", "((2 * IA) > 1)"); + assertRewriteAfterSimplify("-2 * IA > 1", "((-2 * IA) > 1)"); + assertRewriteAfterSimplify("2 / IA > 1", "((2 / IA) > 1)"); + assertRewriteAfterSimplify("-2 / IA > 1", "((-2 / IA) > 1)"); + assertRewriteAfterSimplify("IA * 2 > 1", "((IA * 2) > 1)"); + assertRewriteAfterSimplify("IA * (-2) > 1", "((IA * (-2)) > 1)"); + assertRewriteAfterSimplify("IA / 2 > 1", "(IA > 
cast((1 * 2) as INT))"); + assertRewriteAfterSimplify("IA / -2 > 1", "cast((1 * -2) as INT) > IA"); // test integer type - assertRewriteAfterSimplify("1 + a > 2", "a > cast((2 - 1) as INT)", nameToSlot); - assertRewriteAfterSimplify("-1 + a > 2", "a > cast((2 - (-1)) as INT)", nameToSlot); - assertRewriteAfterSimplify("1 - a > 2", "a < cast((1 - 2) as INT)", nameToSlot); - assertRewriteAfterSimplify("-1 - a > 2", "a < cast(((-1) - 2) as INT)", nameToSlot); - assertRewriteAfterSimplify("2 * a > 1", "((2 * a) > 1)", nameToSlot); - assertRewriteAfterSimplify("-2 * a > 1", "((-2 * a) > 1)", nameToSlot); - assertRewriteAfterSimplify("2 / a > 1", "((2 / a) > 1)", nameToSlot); - assertRewriteAfterSimplify("-2 / a > 1", "((-2 / a) > 1)", nameToSlot); - assertRewriteAfterSimplify("a * 2 > 1", "((a * 2) > 1)", nameToSlot); - assertRewriteAfterSimplify("a * (-2) > 1", "((a * (-2)) > 1)", nameToSlot); - assertRewriteAfterSimplify("a / 2 > 1", "(a > cast((1 * 2) as INT))", nameToSlot); + assertRewriteAfterSimplify("TA + 2 > 200", "cast(TA as INT) > (200 - 2)"); + assertRewriteAfterSimplify("TA - 2 > 200", "cast(TA as INT) > (200 + 2)"); + assertRewriteAfterSimplify("1 + TA > 200", "cast(TA as INT) > (200 - 1)"); + assertRewriteAfterSimplify("-1 + TA > 200", "cast(TA as INT) > (200 - (-1))"); + assertRewriteAfterSimplify("1 - TA > 200", "cast(TA as INT) < (1 - 200))"); + assertRewriteAfterSimplify("-1 - TA > 200", "cast(TA as INT) < ((-1) - 200)"); + assertRewriteAfterSimplify("2 * TA > 200", "((2 * TA) > 200)"); + assertRewriteAfterSimplify("-2 * TA > 200", "((-2 * TA) > 200)"); + assertRewriteAfterSimplify("2 / TA > 200", "((2 / TA) > 200)"); + assertRewriteAfterSimplify("-2 / TA > 200", "((-2 / TA) > 200)"); + assertRewriteAfterSimplify("TA * 2 > 200", "((TA * 2) > 200)"); + assertRewriteAfterSimplify("TA * (-2) > 200", "((TA * (-2)) > 200)"); + assertRewriteAfterSimplify("TA / 2 > 200", "cast(TA as INT) > (200 * 2)"); + assertRewriteAfterSimplify("TA / -2 > 200", "(200 * -2) 
> cast(TA as INT)"); // test decimal type - assertRewriteAfterSimplify("1.1 + a > 2.22", "(cast(a as DECIMALV3(12, 2)) > cast((2.22 - 1.1) as DECIMALV3(12, 2)))", nameToSlot); - assertRewriteAfterSimplify("-1.1 + a > 2.22", "(cast(a as DECIMALV3(12, 2)) > cast((2.22 - (-1.1)) as DECIMALV3(12, 2)))", nameToSlot); - assertRewriteAfterSimplify("1.1 - a > 2.22", "(cast(a as DECIMALV3(11, 1)) < cast((1.1 - 2.22) as DECIMALV3(11, 1)))", nameToSlot); - assertRewriteAfterSimplify("-1.1 - a > 2.22", "(cast(a as DECIMALV3(11, 1)) < cast((-1.1 - 2.22) as DECIMALV3(11, 1)))", nameToSlot); - assertRewriteAfterSimplify("2.22 * a > 1.1", "((2.22 * a) > 1.1)", nameToSlot); - assertRewriteAfterSimplify("-2.22 * a > 1.1", "-2.22 * a > 1.1", nameToSlot); - assertRewriteAfterSimplify("2.22 / a > 1.1", "((2.22 / a) > 1.1)", nameToSlot); - assertRewriteAfterSimplify("-2.22 / a > 1.1", "((-2.22 / a) > 1.1)", nameToSlot); - assertRewriteAfterSimplify("a * 2.22 > 1.1", "a * 2.22 > 1.1", nameToSlot); - assertRewriteAfterSimplify("a * (-2.22) > 1.1", "a * (-2.22) > 1.1", nameToSlot); - assertRewriteAfterSimplify("a / 2.22 > 1.1", "(cast(a as DECIMALV3(13, 3)) > cast((1.1 * 2.22) as DECIMALV3(13, 3)))", nameToSlot); - assertRewriteAfterSimplify("a / (-2.22) > 1.1", "(cast((1.1 * -2.22) as DECIMALV3(13, 3)) > cast(a as DECIMALV3(13, 3)))", nameToSlot); - - // test (1 + a) can be processed - assertRewriteAfterSimplify("2 - (1 + a) > 3", "(a < ((2 - 3) - 1))", nameToSlot); - assertRewriteAfterSimplify("(1 - a) / 2 > 3", "(a < (1 - 6))", nameToSlot); - assertRewriteAfterSimplify("1 - a / 2 > 3", "(a < ((1 - 3) * 2))", nameToSlot); - assertRewriteAfterSimplify("(1 - (a + 4)) / 2 > 3", "(cast(a as BIGINT) < ((1 - 6) - 4))", nameToSlot); - assertRewriteAfterSimplify("2 * (1 + a) > 1", "(2 * (1 + a)) > 1", nameToSlot); + assertRewriteAfterSimplify("1.1 + IA > 2.22", "(cast(IA as DECIMALV3(12, 2)) > cast((2.22 - 1.1) as DECIMALV3(12, 2)))"); + assertRewriteAfterSimplify("-1.1 + IA > 2.22", "(cast(IA 
as DECIMALV3(12, 2)) > cast((2.22 - (-1.1)) as DECIMALV3(12, 2)))"); + assertRewriteAfterSimplify("1.1 - IA > 2.22", "(cast(IA as DECIMALV3(11, 1)) < cast((1.1 - 2.22) as DECIMALV3(11, 1)))"); + assertRewriteAfterSimplify("-1.1 - IA > 2.22", "(cast(IA as DECIMALV3(11, 1)) < cast((-1.1 - 2.22) as DECIMALV3(11, 1)))"); + assertRewriteAfterSimplify("2.22 * IA > 1.1", "((2.22 * IA) > 1.1)"); + assertRewriteAfterSimplify("-2.22 * IA > 1.1", "-2.22 * IA > 1.1"); + assertRewriteAfterSimplify("2.22 / IA > 1.1", "((2.22 / IA) > 1.1)"); + assertRewriteAfterSimplify("-2.22 / IA > 1.1", "((-2.22 / IA) > 1.1)"); + assertRewriteAfterSimplify("IA * 2.22 > 1.1", "IA * 2.22 > 1.1"); + assertRewriteAfterSimplify("IA * (-2.22) > 1.1", "IA * (-2.22) > 1.1"); + assertRewriteAfterSimplify("IA / 2.22 > 1.1", "(cast(IA as DECIMALV3(13, 3)) > cast((1.1 * 2.22) as DECIMALV3(13, 3)))"); + assertRewriteAfterSimplify("IA / (-2.22) > 1.1", "(cast((1.1 * -2.22) as DECIMALV3(13, 3)) > cast(IA as DECIMALV3(13, 3)))"); + + // test (1 + IA) can be processed + assertRewriteAfterSimplify("2 - (1 + IA) > 3", "(IA < ((2 - 3) - 1))"); + assertRewriteAfterSimplify("(1 - IA) / 2 > 3", "(IA < (1 - 6))"); + assertRewriteAfterSimplify("1 - IA / 2 > 3", "(IA < ((1 - 3) * 2))"); + assertRewriteAfterSimplify("(1 - (IA + 4)) / 2 > 3", "(cast(IA as BIGINT) < ((1 - 6) - 4))"); + assertRewriteAfterSimplify("2 * (1 + IA) > 1", "(2 * (1 + IA)) > 1"); + + // test (IA + IB) can be processed + assertRewriteAfterSimplify("2 - (1 + (IA + IB)) > 3", "(IA + IB) < cast(((2 - 3) - 1) as BIGINT)"); + assertRewriteAfterSimplify("(1 - (IA + IB)) / 2 > 3", "(IA + IB) < cast((1 - 6) as BIGINT)"); + assertRewriteAfterSimplify("1 - (IA + IB) / 2 > 3", "(IA + IB) < cast(((1 - 3) * 2) as BIGINT)"); + assertRewriteAfterSimplify("2 * (1 + (IA + IB)) > 1", "(2 * (1 + (IA + IB))) > 1"); + } + + @Test + public void testDateLike() { + executor = new ExpressionRuleExecutor(ImmutableList.of( + bottomUp( + SimplifyArithmeticRule.INSTANCE, + 
SimplifyArithmeticComparisonRule.INSTANCE + ) + )); + + // test datetimev2 type + assertRewriteAfterTypeCoercion("years_add(AA, 1) > '2021-01-01 00:00:00'", "(years_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("years_sub(AA, 1) > '2021-01-01 00:00:00'", "(years_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_add(AA, 1) > '2021-01-01 00:00:00'", "(months_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_sub(AA, 1) > '2021-01-01 00:00:00'", "(months_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("weeks_add(AA, 1) > '2021-01-01 00:00:00'", "AA > weeks_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > weeks_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_add(AA, 1) > '2021-01-01 00:00:00'", "AA > days_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > days_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_add(AA, 1) > '2021-01-01 00:00:00'", "AA > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(AA, 1) > '2021-01-01 00:00:00'", "AA > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(AA, 1) > '2021-01-01 00:00:00'", "AA > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > seconds_add('2021-01-01 00:00:00', 1)"); + + assertRewriteAfterTypeCoercion("years_add(AA, 1) > '2021-01-01'", "(years_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("years_sub(AA, 1) > '2021-01-01'", 
"(years_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_add(AA, 1) > '2021-01-01'", "(months_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_sub(AA, 1) > '2021-01-01'", "(months_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("weeks_add(AA, 1) > '2021-01-01'", "AA > weeks_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(AA, 1) > '2021-01-01'", "AA > weeks_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_add(AA, 1) > '2021-01-01'", "AA > days_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_sub(AA, 1) > '2021-01-01'", "AA > days_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_add(AA, 1) > '2021-01-01'", "AA > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(AA, 1) > '2021-01-01'", "AA > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(AA, 1) > '2021-01-01'", "AA > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(AA, 1) > '2021-01-01'", "AA > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(AA, 1) > '2021-01-01'", "AA > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(AA, 1) > '2021-01-01'", "AA > seconds_add('2021-01-01 00:00:00', 1)"); + + // test date type + assertRewriteAfterTypeCoercion("years_add(CA, 1) > '2021-01-01'", "years_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("years_sub(CA, 1) > '2021-01-01'", "years_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_add(CA, 1) > '2021-01-01'", "months_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_sub(CA, 1) > '2021-01-01'", "months_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("weeks_add(CA, 1) > '2021-01-01'", "CA > 
weeks_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(CA, 1) > '2021-01-01'", "CA > weeks_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_add(CA, 1) > '2021-01-01'", "CA > days_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_sub(CA, 1) > '2021-01-01'", "CA > days_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("hours_add(CA, 1) > '2021-01-01'", "cast(CA as datetime) > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(CA, 1) > '2021-01-01'", "cast(CA as datetime) > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(CA, 1) > '2021-01-01'", "cast(CA as datetime) > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(CA, 1) > '2021-01-01'", "cast(CA as datetime) > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(CA, 1) > '2021-01-01'", "cast(CA as datetime) > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(CA, 1) > '2021-01-01'", "cast(CA as datetime) > seconds_add('2021-01-01 00:00:00', 1)"); + + assertRewriteAfterTypeCoercion("years_add(CA, 1) > '2021-01-01 00:00:00'", "years_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("years_sub(CA, 1) > '2021-01-01 00:00:00'", "years_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_add(CA, 1) > '2021-01-01 00:00:00'", "months_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_sub(CA, 1) > '2021-01-01 00:00:00'", "months_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("weeks_add(CA, 1) > '2021-01-01 00:00:00'", "CA > weeks_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(CA, 1) > '2021-01-01 00:00:00'", "CA > weeks_add(cast('2021-01-01' as date), 1)"); + 
assertRewriteAfterTypeCoercion("days_add(CA, 1) > '2021-01-01 00:00:00'", "CA > days_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_sub(CA, 1) > '2021-01-01 00:00:00'", "CA > days_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("hours_add(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > seconds_add('2021-01-01 00:00:00', 1)"); } - private void assertRewriteAfterSimplify(String expr, String expected, Map slotNameToSlot) { + private void assertRewriteAfterSimplify(String expr, String expected) { Expression needRewriteExpression = PARSER.parseExpression(expr); - if (slotNameToSlot != null) { - needRewriteExpression = replaceUnboundSlot(needRewriteExpression, slotNameToSlot); - } + needRewriteExpression = replaceUnboundSlot(needRewriteExpression, Maps.newHashMap()); Expression rewritten = executor.rewrite(needRewriteExpression, context); Expression expectedExpression = PARSER.parseExpression(expected); Assertions.assertEquals(expectedExpression.toSql(), rewritten.toSql()); diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java 
index 29889efdd6ce13..32b3706b290fbd 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java @@ -17,14 +17,12 @@ package org.apache.doris.nereids.rules.expression.rules; -import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; import org.apache.doris.nereids.types.DateTimeV2Type; import org.apache.doris.nereids.util.MemoPatternMatchSupported; import org.apache.doris.nereids.util.PlanChecker; import org.apache.doris.utframe.TestWithFeService; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; class SimplifyComparisonPredicateSqlTest extends TestWithFeService implements MemoPatternMatchSupported { @@ -153,17 +151,30 @@ void dateLikeOverflow() { ) ); - Assertions.assertThrows(AnalysisException.class, () -> PlanChecker.from(connectContext) + PlanChecker.from(connectContext) .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '2021-01-32 00:00:00'") .rewrite() - ); - Assertions.assertThrows(AnalysisException.class, () -> PlanChecker.from(connectContext) + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); + + PlanChecker.from(connectContext) + .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '2021-01-32 00:00:00'") + .rewrite() + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); + PlanChecker.from(connectContext) .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '2021-01-32 23:00:00'") .rewrite() - ); - Assertions.assertThrows(AnalysisException.class, () -> PlanChecker.from(connectContext) + .matches(logicalOneRowRelation().when(oneRowRelation -> + 
oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); + PlanChecker.from(connectContext) .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '1000'") .rewrite() - ); + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java index 028f1c4864f099..9a36fb59b9f18d 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java @@ -40,6 +40,7 @@ import org.apache.doris.nereids.trees.expressions.literal.DateV2Literal; import org.apache.doris.nereids.trees.expressions.literal.DecimalV3Literal; import org.apache.doris.nereids.trees.expressions.literal.DoubleLiteral; +import org.apache.doris.nereids.trees.expressions.literal.FloatLiteral; import org.apache.doris.nereids.trees.expressions.literal.IntegerLiteral; import org.apache.doris.nereids.trees.expressions.literal.LargeIntLiteral; import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; @@ -54,6 +55,7 @@ import org.apache.doris.nereids.types.DateV2Type; import org.apache.doris.nereids.types.DecimalV3Type; import org.apache.doris.nereids.types.DoubleType; +import org.apache.doris.nereids.types.FloatType; import org.apache.doris.nereids.types.IntegerType; import org.apache.doris.nereids.types.SmallIntType; import org.apache.doris.nereids.types.TinyIntType; @@ -166,6 +168,18 @@ void testDateTimeV2CmpDateTimeV2() { new LessThan(date, new DateV2Literal("2020-01-02"))); assertRewrite(new LessThanEqual(new Cast(date, DateTimeType.INSTANCE), new DateTimeLiteral("2020-01-01 00:00:01")), new 
LessThanEqual(date, new DateV2Literal("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(date, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:00")), + new EqualTo(date, new DateV2Literal("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(date, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + ExpressionUtils.falseOrNull(date)); + assertRewrite(new EqualTo(new Cast(date, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + ExpressionUtils.falseOrNull(date)); + assertRewrite(new NullSafeEqual(new Cast(date, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThanEqual(new Cast(date, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + new GreaterThanEqual(date, new DateV2Literal("2020-01-02"))); + assertRewrite(new GreaterThanEqual(new Cast(date, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + new GreaterThanEqual(date, new DateV2Literal("2020-01-02"))); // cast (date as datev1) = datev1-literal // assertRewrite(new EqualTo(new Cast(date, DateType.INSTANCE), new DateLiteral("2020-01-01")), // new EqualTo(date, new DateV2Literal("2020-01-01"))); @@ -191,6 +205,18 @@ void testDateTimeV2CmpDateTimeV2() { new EqualTo(datev1, new DateLiteral("2020-01-01"))); assertRewrite(new GreaterThan(new Cast(datev1, DateV2Type.INSTANCE), new DateV2Literal("2020-01-01")), new GreaterThan(datev1, new DateLiteral("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(datev1, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:00")), + new EqualTo(datev1, new DateLiteral("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(datev1, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + ExpressionUtils.falseOrNull(datev1)); + assertRewrite(new EqualTo(new Cast(datev1, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 
00:00:00.01")), + ExpressionUtils.falseOrNull(datev1)); + assertRewrite(new NullSafeEqual(new Cast(datev1, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThanEqual(new Cast(datev1, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + new GreaterThanEqual(datev1, new DateLiteral("2020-01-02"))); + assertRewrite(new GreaterThanEqual(new Cast(datev1, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + new GreaterThanEqual(datev1, new DateLiteral("2020-01-02"))); // cast (datetimev1 as datetime) cmp datetime assertRewrite(new EqualTo(new Cast(datetimev1, DateTimeV2Type.of(0)), new DateTimeV2Literal("2020-01-01 00:00:00")), @@ -272,10 +298,197 @@ void testDoubleLiteral() { Expression rewrittenExpression = executor.rewrite(expression, context); Assertions.assertEquals(left.child(0).getDataType(), rewrittenExpression.child(1).getDataType()); Assertions.assertEquals(rewrittenExpression.child(0).getDataType(), rewrittenExpression.child(1).getDataType()); + + Expression tinyIntSlot = new SlotReference("a", TinyIntType.INSTANCE); + Expression smallIntSlot = new SlotReference("a", SmallIntType.INSTANCE); + Expression intSlot = new SlotReference("a", IntegerType.INSTANCE); + Expression bigIntSlot = new SlotReference("a", BigIntType.INSTANCE); + + // tiny int, literal not exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + 
BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 12))); + + // tiny int, literal exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, FloatType.INSTANCE), new FloatLiteral(200.0f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.0f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.trueOrNull(tinyIntSlot)); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.trueOrNull(tinyIntSlot)); + + // small int + assertRewrite(new EqualTo(new Cast(smallIntSlot, FloatType.INSTANCE), 
new FloatLiteral(12.0f)), + new EqualTo(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new EqualTo(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new EqualTo(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(smallIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new GreaterThanEqual(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThan(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThanEqual(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(smallIntSlot, new SmallIntLiteral((short) 12))); + + // int + assertRewrite(new EqualTo(new Cast(intSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(intSlot, new IntegerLiteral(12))); + assertRewrite(new EqualTo(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(intSlot, new IntegerLiteral(12))); + assertRewrite(new EqualTo(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(intSlot)); + assertRewrite(new NullSafeEqual(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(intSlot, new IntegerLiteral(12))); + assertRewrite(new GreaterThanEqual(new Cast(intSlot, 
DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThan(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThanEqual(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(intSlot, new IntegerLiteral(12))); + + // big int + assertRewrite(new EqualTo(new Cast(bigIntSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new EqualTo(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new EqualTo(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(bigIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new GreaterThanEqual(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThan(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThanEqual(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(bigIntSlot, new BigIntLiteral(12L))); + } + + @Test + void testIntCmpDecimalV3Literal() { + executor = new ExpressionRuleExecutor(ImmutableList.of( + bottomUp(SimplifyComparisonPredicate.INSTANCE) + )); + + Expression tinyIntSlot = new SlotReference("a", TinyIntType.INSTANCE); + Expression smallIntSlot = new SlotReference("a", SmallIntType.INSTANCE); + Expression intSlot = new SlotReference("a", 
IntegerType.INSTANCE); + Expression bigIntSlot = new SlotReference("a", BigIntType.INSTANCE); + + // tiny int, literal not exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 12))); + + // tiny int, literal exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.0"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new 
Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.trueOrNull(tinyIntSlot)); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.trueOrNull(tinyIntSlot)); + + // small int + assertRewrite(new EqualTo(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new EqualTo(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(smallIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new GreaterThanEqual(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThan(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new 
DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThanEqual(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(smallIntSlot, new SmallIntLiteral((short) 12))); + + // int + assertRewrite(new EqualTo(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(intSlot, new IntegerLiteral(12))); + assertRewrite(new EqualTo(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(intSlot)); + assertRewrite(new NullSafeEqual(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(intSlot, new IntegerLiteral(12))); + assertRewrite(new GreaterThanEqual(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThan(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThanEqual(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(intSlot, new IntegerLiteral(12))); + + // big int + assertRewrite(new EqualTo(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new EqualTo(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + 
ExpressionUtils.falseOrNull(bigIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new GreaterThanEqual(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThan(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThanEqual(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(bigIntSlot, new BigIntLiteral(12L))); } @Test - void testDecimalV3Literal() { + void testDecimalCmpDecimalV3Literal() { executor = new ExpressionRuleExecutor(ImmutableList.of( bottomUp(SimplifyComparisonPredicate.INSTANCE) )); diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java index a002fe1a9c04a9..8279cd5cc4ffeb 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java @@ -136,7 +136,7 @@ public void testParse() { )); // need select * - String sql3 = "seelct k1, k2, v1, v2 replace(k1 / 2 as k1) from t1"; + String sql3 = "select k1, k2, v1, v2 replace(k1 / 2 as k1) from t1"; Assertions.assertThrows(ParseException.class, () -> PlanChecker.from(MemoTestUtils.createConnectContext()) .checkParse(sql3, (checker) -> checker.matches( 
logicalProject( @@ -152,7 +152,7 @@ public void testParse() { .checkParse(sql4, (checker) -> checker.matches( logicalProject( logicalCheckPolicy( - unboundRelation() + logicalOneRowRelation() ) ) ))); @@ -162,9 +162,7 @@ public void testParse() { Assertions.assertThrows(ParseException.class, () -> PlanChecker.from(MemoTestUtils.createConnectContext()) .checkParse(sql5, (checker) -> checker.matches( logicalProject( - logicalCheckPolicy( - unboundRelation() - ) + logicalOneRowRelation() ) ))); diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java index 8db1c9446d0c6d..786355c83b76c6 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java @@ -23,6 +23,7 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; +import java.time.DateTimeException; import java.util.function.Consumer; class DateLiteralTest { @@ -67,7 +68,7 @@ void testDate() { new DateLiteral("2022-1-1"); new DateLiteral("20220101"); - Assertions.assertThrows(AnalysisException.class, () -> new DateLiteral("-01-01")); + Assertions.assertThrows(DateTimeException.class, () -> new DateLiteral("-01-01")); } @Test @@ -128,8 +129,8 @@ void testIrregularDate() { @Test void testWrongPunctuationDate() { - Assertions.assertThrows(AnalysisException.class, () -> new DateTimeV2Literal("2020€02€01")); - Assertions.assertThrows(AnalysisException.class, () -> new DateTimeV2Literal("2020【02】01")); + Assertions.assertThrows(DateTimeException.class, () -> new DateTimeV2Literal("2020€02€01")); + Assertions.assertThrows(DateTimeException.class, () -> new DateTimeV2Literal("2020【02】01")); } @Test diff --git a/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java 
b/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java index 9e8ff913ada8ac..1a9a175366e528 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java +++ b/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java @@ -95,6 +95,7 @@ import org.apache.thrift.TException; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Random; import java.util.concurrent.BlockingQueue; @@ -305,6 +306,10 @@ private void handleCloneTablet(TAgentTaskRequest request, TFinishTaskRequest fin tabletInfo.setPathHash(pathHash); tabletInfo.setUsed(true); tabletInfos.add(tabletInfo); + if (DebugPointUtil.isEnable("MockedBackendFactory.handleCloneTablet.failed")) { + finishTaskRequest.setTaskStatus(new TStatus(TStatusCode.CANCELLED)); + finishTaskRequest.getTaskStatus().setErrorMsgs(Collections.singletonList("debug point set")); + } finishTaskRequest.setFinishTabletInfos(tabletInfos); } diff --git a/gensrc/thrift/PaloInternalService.thrift b/gensrc/thrift/PaloInternalService.thrift index 6463935c5c8e88..39f3b65818f2b9 100644 --- a/gensrc/thrift/PaloInternalService.thrift +++ b/gensrc/thrift/PaloInternalService.thrift @@ -361,7 +361,9 @@ struct TQueryOptions { 142: optional bool enable_fixed_len_to_uint32_v2 = false; 143: optional bool enable_shared_exchange_sink_buffer = true; - 144: optional bool fuzzy_disable_runtime_filter_in_be = false; + 144: optional bool enable_inverted_index_searcher_cache = true; + 145: optional bool enable_inverted_index_query_cache = true; + 146: optional bool fuzzy_disable_runtime_filter_in_be = false; // For cloud, to control if the content would be written into file cache // In write path, to control if the content would be written into file cache. 
diff --git a/regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out b/regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out new file mode 100644 index 00000000000000..f42bb629550c88 --- /dev/null +++ b/regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out @@ -0,0 +1,29 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !predicate_fixed_char1 -- +1 a + +-- !predicate_fixed_char2 -- + +-- !predicate_changed_type1 -- +1 Alice + +-- !predicate_changed_type2 -- +2 Bob + +-- !predicate_changed_type3 -- +3 Charlie + +-- !predicate_fixed_char1 -- +1 a + +-- !predicate_fixed_char2 -- + +-- !predicate_changed_type1 -- +1 Alice + +-- !predicate_changed_type2 -- +2 Bob + +-- !predicate_changed_type3 -- +3 Charlie + diff --git a/regression-test/data/fault_injection_p0/test_inverted_index_cache.out b/regression-test/data/fault_injection_p0/test_inverted_index_cache.out new file mode 100644 index 00000000000000..7d166b8b78d5d3 --- /dev/null +++ b/regression-test/data/fault_injection_p0/test_inverted_index_cache.out @@ -0,0 +1,22 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !sql -- +863 + +-- !sql -- +863 + +-- !sql -- +863 + +-- !sql -- +863 + +-- !sql -- +350 + +-- !sql -- +863 + +-- !sql -- +350 + diff --git a/regression-test/data/inverted_index_p0/test_inverted_index_v3.out b/regression-test/data/inverted_index_p0/test_inverted_index_v3.out index 9dc20f3e0e0a85..53f4eb7ae0a667 100644 --- a/regression-test/data/inverted_index_p0/test_inverted_index_v3.out +++ b/regression-test/data/inverted_index_p0/test_inverted_index_v3.out @@ -23,3 +23,15 @@ -- !sql -- 105 +-- !sql -- +238 + +-- !sql -- +104 + +-- !sql -- +104 + +-- !sql -- +105 + diff --git a/regression-test/data/mtmv_p0/test_iceberg_mtmv.out b/regression-test/data/mtmv_p0/test_iceberg_mtmv.out index c9d9799da81300..483ac0957e6f67 100644 --- a/regression-test/data/mtmv_p0/test_iceberg_mtmv.out +++ b/regression-test/data/mtmv_p0/test_iceberg_mtmv.out @@ -103,3 +103,18 @@ 2024-09-30 6 2024-10-28 7 +-- !refresh_one_partition -- +2024-01-01T00:00 4 + +-- !refresh_one_partition_rewrite -- +2024-01-01T00:00 4 +2024-01-02T00:00 3 + +-- !refresh_auto -- +2024-01-01T00:00 4 +2024-01-02T00:00 3 + +-- !refresh_all_partition_rewrite -- +2024-01-01T00:00 4 +2024-01-02T00:00 3 + diff --git a/regression-test/data/mtmv_p0/test_paimon_mtmv.out b/regression-test/data/mtmv_p0/test_paimon_mtmv.out index c28b7cb7baca22..ba6fc06c1d2491 100644 --- a/regression-test/data/mtmv_p0/test_paimon_mtmv.out +++ b/regression-test/data/mtmv_p0/test_paimon_mtmv.out @@ -111,3 +111,32 @@ false -- !not_partition_after -- true +-- !join_one_partition -- +1 2 a 1 2 +10 1 a \N \N +2 2 a \N \N +3 2 a \N \N +4 2 a \N \N +5 2 a \N \N +6 1 a \N \N +7 1 a \N \N +8 1 a \N \N +9 1 a \N \N + +-- !two_partition -- +1 2020-01-01 bj +2 2020-01-01 sh +3 2038-01-01 bj +4 2038-01-01 sh +5 2038-01-02 bj + +-- !limit_partition -- +3 2038-01-01 bj +4 2038-01-01 sh +5 2038-01-02 bj + +-- !null_partition -- +1 bj +4 null +5 NULL + diff --git 
a/regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out b/regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out new file mode 100644 index 00000000000000..09d23b7736e1d8 --- /dev/null +++ b/regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out @@ -0,0 +1,79 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !refresh_one_partition -- +1 2 a 1 2 +10 1 a \N \N +2 2 a \N \N +3 2 a \N \N +4 2 a \N \N +5 2 a \N \N +6 1 a \N \N +7 1 a \N \N +8 1 a \N \N +9 1 a \N \N + +-- !refresh_one_partition_rewrite -- +1 2 a 1 2 +1 2 b 1 2 +10 1 a \N \N +10 1 b \N \N +2 2 a \N \N +2 2 b \N \N +3 2 a \N \N +3 2 b \N \N +4 2 a \N \N +4 2 b \N \N +5 2 a \N \N +5 2 b \N \N +6 1 a \N \N +6 1 b \N \N +7 1 a \N \N +7 1 b \N \N +8 1 a \N \N +8 1 b \N \N +9 1 a \N \N +9 1 b \N \N + +-- !refresh_auto -- +1 2 a 1 2 +1 2 b 1 2 +10 1 a \N \N +10 1 b \N \N +2 2 a \N \N +2 2 b \N \N +3 2 a \N \N +3 2 b \N \N +4 2 a \N \N +4 2 b \N \N +5 2 a \N \N +5 2 b \N \N +6 1 a \N \N +6 1 b \N \N +7 1 a \N \N +7 1 b \N \N +8 1 a \N \N +8 1 b \N \N +9 1 a \N \N +9 1 b \N \N + +-- !refresh_all_partition_rewrite -- +1 2 a 1 2 +1 2 b 1 2 +10 1 a \N \N +10 1 b \N \N +2 2 a \N \N +2 2 b \N \N +3 2 a \N \N +3 2 b \N \N +4 2 a \N \N +4 2 b \N \N +5 2 a \N \N +5 2 b \N \N +6 1 a \N \N +6 1 b \N \N +7 1 a \N \N +7 1 b \N \N +8 1 a \N \N +8 1 b \N \N +9 1 a \N \N +9 1 b \N \N + diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q14.out b/regression-test/data/nereids_hint_tpch_p0/shape/q14.out deleted file mode 100644 index 3633709f96fa8a..00000000000000 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q14.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() ---------------PhysicalProject -----------------PhysicalOlapScan[part] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(part lineitem ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q15.out b/regression-test/data/nereids_hint_tpch_p0/shape/q15.out deleted file mode 100644 index a88c5e699bd99d..00000000000000 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q15.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] -------------hashAgg[GLOBAL] 
---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -------------------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(supplier revenue0 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/nereids_p0/ddl/use/use_command_nereids.out b/regression-test/data/nereids_p0/ddl/use/use_command_nereids.out new file mode 100644 index 00000000000000..17a7eaf6d7e12d --- /dev/null +++ b/regression-test/data/nereids_p0/ddl/use/use_command_nereids.out @@ -0,0 +1,13 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !show_tables_db1 -- +tb1 + +-- !show_tables_db2 -- +tb2 + +-- !show_tables_db1 -- +tb1 + +-- !show_tables_db2 -- +tb2 + diff --git a/regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out b/regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out new file mode 100644 index 00000000000000..65b48e2b8c1fce --- /dev/null +++ b/regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out @@ -0,0 +1,11 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !query_after_delete -- +1 1 o 10.50 2023-12-08 a b 1 yy \N +1 1 o 9.50 2023-12-08 a b 1 yy 1 +2 1 o 11.50 2023-12-09 a b 1 yy 2 +3 1 o 12.50 2023-12-10 a b 1 yy \N +3 1 o 33.50 2023-12-10 a b 1 yy 3 +4 2 o 43.20 2023-12-11 c d 2 mm \N +5 2 o 1.20 2023-12-12 c d 2 mi \N +5 2 o 56.20 2023-12-12 c d 2 mi 4 + diff --git a/regression-test/data/new_shapes_p0/clickbench/query1.out b/regression-test/data/new_shapes_p0/clickbench/query1.out deleted file mode 100644 index f98c53e3d5fc4e..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query1.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_1 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalStorageLayerAggregate[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query10.out b/regression-test/data/new_shapes_p0/clickbench/query10.out deleted file mode 100644 index c784056436912a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query10.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query11.out b/regression-test/data/new_shapes_p0/clickbench/query11.out deleted file mode 100644 index 4b5e4486d3f4cc..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query11.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_11 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (MobilePhoneModel = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query12.out b/regression-test/data/new_shapes_p0/clickbench/query12.out deleted file mode 100644 index 10928363a83c02..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query12.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (MobilePhoneModel = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query13.out b/regression-test/data/new_shapes_p0/clickbench/query13.out deleted file mode 100644 index ce6675dc3bb26e..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query13.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_13 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query14.out b/regression-test/data/new_shapes_p0/clickbench/query14.out deleted file mode 100644 index 35eedce41b927a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query14.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_14 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query15.out b/regression-test/data/new_shapes_p0/clickbench/query15.out deleted file mode 100644 index bf7f267f0e47be..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query15.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query16.out b/regression-test/data/new_shapes_p0/clickbench/query16.out deleted file mode 100644 index a229f5310dfc2d..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query16.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_16 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query17.out b/regression-test/data/new_shapes_p0/clickbench/query17.out deleted file mode 100644 index 78635481d04652..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query17.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query18.out b/regression-test/data/new_shapes_p0/clickbench/query18.out deleted file mode 100644 index 6af4a027d886c3..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query18.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query19.out b/regression-test/data/new_shapes_p0/clickbench/query19.out deleted file mode 100644 index 7540225b393218..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query19.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query2.out b/regression-test/data/new_shapes_p0/clickbench/query2.out deleted file mode 100644 index 4f4565a083c67b..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query2.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_2 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter(( not (AdvEngineID = 0))) -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query20.out b/regression-test/data/new_shapes_p0/clickbench/query20.out deleted file mode 100644 index 51f1da68b40a5b..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query20.out +++ /dev/null @@ -1,8 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_20 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------filter((hits.UserID = 435090932899640449)) ---------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query21.out b/regression-test/data/new_shapes_p0/clickbench/query21.out deleted file mode 100644 index 104d1b4710532a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query21.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_21 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((URL like '%google%')) -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query22.out b/regression-test/data/new_shapes_p0/clickbench/query22.out deleted file mode 100644 index d5274c3548eb28..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query22.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = '')) and (URL like '%google%')) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query23.out b/regression-test/data/new_shapes_p0/clickbench/query23.out deleted file mode 100644 index 76a91b3ad49968..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query23.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_23 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = '')) and ( not (URL like '%.google.%')) and (Title like '%Google%')) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query24.out b/regression-test/data/new_shapes_p0/clickbench/query24.out deleted file mode 100644 index fd0a2f5b670727..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query24.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_24 -- -PhysicalDeferMaterializeResultSink ---PhysicalDeferMaterializeTopN -----PhysicalDistribute[DistributionSpecGather] -------PhysicalDeferMaterializeTopN ---------filter((URL like '%google%')) -----------PhysicalDeferMaterializeOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query25.out b/regression-test/data/new_shapes_p0/clickbench/query25.out deleted file mode 100644 index 271149db672442..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query25.out +++ /dev/null @@ -1,11 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_25 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter(( not (SearchPhrase = ''))) ---------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query26.out b/regression-test/data/new_shapes_p0/clickbench/query26.out deleted file mode 100644 index 7317f810a3bb23..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query26.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(( not (SearchPhrase = ''))) -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query27.out b/regression-test/data/new_shapes_p0/clickbench/query27.out deleted file mode 100644 index 1dbae1e0dc1a8c..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query27.out +++ /dev/null @@ -1,11 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_27 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter(( not (SearchPhrase = ''))) ---------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query28.out b/regression-test/data/new_shapes_p0/clickbench/query28.out deleted file mode 100644 index e5cb28eab7aa2b..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query28.out +++ /dev/null @@ -1,14 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_28 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((c > 100000)) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(( not (URL = ''))) ---------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query29.out b/regression-test/data/new_shapes_p0/clickbench/query29.out deleted file mode 100644 index 01e642b5b4339f..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query29.out +++ /dev/null @@ -1,14 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((c > 100000)) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(( not (Referer = ''))) ---------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query3.out b/regression-test/data/new_shapes_p0/clickbench/query3.out deleted file mode 100644 index d4fb562b4fdb3f..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query3.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_3 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query30.out b/regression-test/data/new_shapes_p0/clickbench/query30.out deleted file mode 100644 index bad1a26f517088..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query30.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_30 -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query31.out b/regression-test/data/new_shapes_p0/clickbench/query31.out deleted file mode 100644 index a662fac4ef4581..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query31.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_31 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query32.out b/regression-test/data/new_shapes_p0/clickbench/query32.out deleted file mode 100644 index 29828472ccab40..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query32.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_32 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query33.out b/regression-test/data/new_shapes_p0/clickbench/query33.out deleted file mode 100644 index f47fe46b46005a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query33.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query34.out b/regression-test/data/new_shapes_p0/clickbench/query34.out deleted file mode 100644 index c2b2ed43e72e9f..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query34.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_34 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query35.out b/regression-test/data/new_shapes_p0/clickbench/query35.out deleted file mode 100644 index 12617f3936158c..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query35.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query36.out b/regression-test/data/new_shapes_p0/clickbench/query36.out deleted file mode 100644 index 2d49c7645c7528..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query36.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_36 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query37.out b/regression-test/data/new_shapes_p0/clickbench/query37.out deleted file mode 100644 index 757b4f64df4e98..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query37.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (URL = '')) and (hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query38.out b/regression-test/data/new_shapes_p0/clickbench/query38.out deleted file mode 100644 index 37d0392a7b2490..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query38.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_38 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (Title = '')) and (hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query39.out b/regression-test/data/new_shapes_p0/clickbench/query39.out deleted file mode 100644 index 89222c0f0d2abe..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query39.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_39 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (IsLink = 0)) and (hits.CounterID = 62) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsDownload = 0) and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query4.out b/regression-test/data/new_shapes_p0/clickbench/query4.out deleted file mode 100644 index 966b6cb7ecca97..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query4.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_4 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query40.out b/regression-test/data/new_shapes_p0/clickbench/query40.out deleted file mode 100644 index d0f5babf3275e1..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query40.out +++ /dev/null @@ -1,14 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((hits.CounterID = 62) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0)) ---------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query41.out b/regression-test/data/new_shapes_p0/clickbench/query41.out deleted file mode 100644 index 8a7019e5969e79..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query41.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((hits.CounterID = 62) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0) and (hits.RefererHash = 3594120000172545465) and TraficSourceID IN (-1, 6)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query42.out b/regression-test/data/new_shapes_p0/clickbench/query42.out deleted file mode 100644 index b4e8bce045c9c8..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query42.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0) and (hits.URLHash = 2868770270353813622)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query43.out b/regression-test/data/new_shapes_p0/clickbench/query43.out deleted file mode 100644 index 80e197103fc1fb..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query43.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-15') and (hits.EventDate >= '2013-07-14') and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query5.out b/regression-test/data/new_shapes_p0/clickbench/query5.out deleted file mode 100644 index 94b8f2ad28f023..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query5.out +++ /dev/null @@ -1,11 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_5 -- -PhysicalResultSink ---hashAgg[DISTINCT_GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[DISTINCT_LOCAL] ---------hashAgg[GLOBAL] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query6.out b/regression-test/data/new_shapes_p0/clickbench/query6.out deleted file mode 100644 index 75ba24ac143f06..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query6.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_6 -- -PhysicalResultSink ---hashAgg[DISTINCT_GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[DISTINCT_LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query7.out b/regression-test/data/new_shapes_p0/clickbench/query7.out deleted file mode 100644 index 565f0c0f71985a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query7.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_7 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalStorageLayerAggregate[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query8.out b/regression-test/data/new_shapes_p0/clickbench/query8.out deleted file mode 100644 index 5bebd9361a2ca6..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query8.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_8 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (AdvEngineID = 0))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query9.out b/regression-test/data/new_shapes_p0/clickbench/query9.out deleted file mode 100644 index dcece9f0ce72d7..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query9.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_9 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out deleted file mode 100644 index 401b9bd4b037c9..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF3 ctr_store_sk->[ctr_store_sk] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ctr_customer_sk->[c_customer_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 
-----------------------PhysicalProject -------------------------filter((store.s_state = 'TN')) ---------------------------PhysicalOlapScan[store] - -Hint log: -Used: leading(store_returns broadcast date_dim ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out deleted file mode 100644 index 11fb1e7c9be3e6..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out +++ /dev/null @@ -1,56 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_ticket_number->[ss_ticket_number];RF6 sr_item_sk->[i_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk];RF3 ca_zip->[s_zip] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: 
RF1 RF2 RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 5)) ---------------------------------PhysicalOlapScan[store] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] apply RFs: RF6 -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------filter((ssales.i_color = 'aquamarine')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - -Hint log: -Used: leading(store_sales broadcast store shuffle { customer shuffle customer_address } shuffle item shuffle store_returns ) -UnUsed: -SyntaxError: - diff --git 
a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out deleted file mode 100644 index 26a67aa0d6e85a..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out +++ /dev/null @@ -1,106 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF19 cs_item_sk->[i_item_sk,sr_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF18 p_promo_sk->[ss_promo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF15 ib_income_band_sk->[hd_income_band_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF14 ca_address_sk->[c_current_addr_sk] 
-------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[c_current_hdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF12 ca_address_sk->[ss_addr_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF10 sr_item_sk->[i_item_sk,ss_item_sk];RF11 sr_ticket_number->[ss_ticket_number] -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[c_first_shipto_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF6 cd_demo_sk->[ss_cdemo_sk] 
---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] -------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF8 RF9 RF10 RF11 RF12 RF16 RF17 RF18 RF19 -------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[c_first_sales_date_sk] ---------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF7 RF13 RF14 ---------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------PhysicalOlapScan[customer_demographics] 
-----------------------------------------------------------PhysicalProject -------------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((item.i_current_price <= 58.00) and (item.i_current_price >= 49.00) and i_color IN ('blush', 'lace', 'lawn', 'misty', 'orange', 'pink')) -----------------------------------------------------------PhysicalOlapScan[item] apply RFs: RF10 RF19 -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF2 ib_income_band_sk->[hd_income_band_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF2 ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[income_band] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF19 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[customer_address] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF15 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_address] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[income_band] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1999, 2000)) 
---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------PhysicalOlapScan[promotion] -----------------PhysicalProject -------------------filter((sale > (2 * refund))) ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_item_sk->[cs_item_sk];RF1 cr_order_number->[cs_order_number] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 1999)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - -Hint log: -Used: leading(catalog_sales shuffle catalog_returns ) leading({ store_sales { { customer d2 } cd2 } } cd1 d3 item { hd1 ib1 } store_returns ad1 hd2 ad2 ib2 d1 store promotion cs_ui ) 
leading(cs1 shuffle cs2 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out deleted file mode 100644 index e93c8687236c29..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq 
<= 1228) and (date_dim.d_month_seq >= 1217)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - -Hint log: -Used: leading(store_sales broadcast date_dim broadcast store broadcast item ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out deleted file mode 100644 index 33ba178690ab03..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out +++ /dev/null @@ -1,59 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF10 w_warehouse_sk->[inv_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF8 d_date_sk->[inv_date_sk];RF9 cs_item_sk->[inv_item_sk] -----------------------PhysicalOlapScan[inventory] apply RFs: RF8 RF9 RF10 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF7 d_week_seq->[d_week_seq] ---------------------------PhysicalProject 
-----------------------------hashJoin[RIGHT_OUTER_JOIN bucketShuffle] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() build RFs:RF5 cs_order_number->[cr_order_number];RF6 cs_item_sk->[cr_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF5 RF6 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[cs_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk) and (catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk];RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 
---------------------------------------------------PhysicalProject -----------------------------------------------------NestedLoopJoin[INNER_JOIN](d3.d_date > days_add(d_date, INTERVAL 5 DAY)) -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((d1.d_year = 1998)) -----------------------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 -----------------------------------------------PhysicalProject -------------------------------------------------filter((household_demographics.hd_buy_potential = '1001-5000')) ---------------------------------------------------PhysicalOlapScan[household_demographics] -------------------------------------------PhysicalProject ---------------------------------------------filter((customer_demographics.cd_marital_status = 'S')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[promotion] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - -Hint log: -Used: leading(inventory shuffle { catalog_returns shuffle { catalog_sales shuffle { d3 broadcast d1 } broadcast household_demographics shuffle customer_demographics broadcast promotion shuffle item } broadcast d2 } broadcast warehouse ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out deleted file mode 100644 index 
e57834b15ff42a..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store_returns] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_returns] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject 
---------------------------------filter((date_dim.d_year = 1998)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_returns] - -Hint log: -Used: leading(web_sales broadcast date_dim web_returns ) leading(catalog_sales broadcast date_dim catalog_returns ) leading(store_sales broadcast date_dim store_returns ) leading(ss shuffle ws shuffle cs ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out deleted file mode 100644 index 15d56e664547d1..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() -------------------PhysicalProject ---------------------filter((lineitem.l_returnflag = 'R')) -----------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= 
'1993-10-01')) -------------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(lineitem shuffle { { customer shuffle orders } broadcast nation } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out deleted file mode 100644 index c37989eb189371..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[supplier] -----------------------------PhysicalProject -------------------------------filter((nation.n_name = 'GERMANY')) ---------------------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(partsupp { supplier nation } ) leading(partsupp { supplier nation } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out deleted file mode 100644 index a8710941069079..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(orders lineitem ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out deleted file mode 100644 index 99b5297e0d6a0d..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - -Hint log: -Used: leading(orders shuffle customer ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out deleted file mode 100644 index a88c5e699bd99d..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -------------------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(supplier revenue0 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out deleted file mode 100644 index a84853ea177561..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[lineitem] -------------------------PhysicalProject ---------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) -----------------------------PhysicalOlapScan[part] - -Hint log: -Used: leading(lineitem broadcast part ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out deleted file mode 100644 index ebd4f59d682f8f..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - -Hint log: -Used: leading(lineitem broadcast part ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out deleted file mode 100644 index b284ef9355077d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - -Hint log: -Used: leading(lineitem { orders shuffle customer } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out deleted file mode 100644 index 94b49c830c4b45..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - -Hint log: -Used: leading(lineitem orders ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out deleted file mode 100644 index 1c001e63b89cfe..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'ASIA')) -----------------------------------PhysicalOlapScan[region] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - -Hint log: -Used: 
leading(lineitem orders broadcast { supplier broadcast { nation broadcast region } } shuffle customer ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out deleted file mode 100644 index 919b8547bb8a69..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) ---------------------------PhysicalOlapScan[lineitem] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n1.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) 
otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n2.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] ---------------------------PhysicalProject -----------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(lineitem broadcast { supplier broadcast n1 } { orders shuffle { customer broadcast n2 } } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out deleted file mode 100644 index 486e40152fb644..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out +++ /dev/null @@ -1,49 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------PhysicalOlapScan[orders] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[lineitem] -------------------------------------PhysicalProject ---------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) -----------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[nation] -------------------------------------PhysicalProject ---------------------------------------filter((region.r_name = 'AMERICA')) -----------------------------------------PhysicalOlapScan[region] ---------------------PhysicalProject -----------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(supplier { orders { lineitem broadcast part } { customer broadcast { n1 broadcast region } } } broadcast n2 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out deleted file mode 100644 index 42e3f4eb2072cd..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] -------------------------------PhysicalProject ---------------------------------filter((p_name like '%green%')) -----------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] - -Hint log: -Used: leading(orders shuffle { lineitem shuffle part } shuffle { supplier broadcast nation } shuffle partsupp ) 
-UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out deleted file mode 100644 index 3a180194ef57b5..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((s.s_suppkey = l.lo_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[lo_suppkey] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_custkey = l.lo_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((p.p_partkey = l.lo_partkey)) otherCondition=() build RFs:RF0 p_partkey->[lo_partkey] -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 -----------------PhysicalOlapScan[part] -------------PhysicalOlapScan[customer] ---------PhysicalOlapScan[supplier] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out deleted file mode 100644 index de05bf1c72f84f..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF0 d_datekey->[lo_orderdate] -------------PhysicalProject ---------------filter((lineorder.lo_discount <= 3) and (lineorder.lo_discount >= 1) and (lineorder.lo_quantity < 25)) -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 -------------PhysicalProject ---------------filter((dates.d_year = 1993)) -----------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out deleted file mode 100644 index a43ea0bce0fbf6..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF0 d_datekey->[lo_orderdate] -------------PhysicalProject ---------------filter((lineorder.lo_discount <= 6) and (lineorder.lo_discount >= 4) and (lineorder.lo_quantity <= 35) and (lineorder.lo_quantity >= 26)) -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 -------------PhysicalProject ---------------filter((dates.d_yearmonth = 'Jan1994')) -----------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out deleted file mode 100644 index 7775cb114f7e58..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF0 d_datekey->[lo_orderdate] -------------PhysicalProject ---------------filter((lineorder.lo_discount <= 7) and (lineorder.lo_discount >= 5) and (lineorder.lo_quantity <= 35) and (lineorder.lo_quantity >= 26)) -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 -------------PhysicalProject ---------------filter((dates.d_weeknuminyear = 6) and (dates.d_year = 1994)) -----------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out deleted file mode 100644 index c1f86cac10185d..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[lo_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[lo_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((part.p_category = 'MFGR#12')) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------filter((supplier.s_region = 'AMERICA')) ---------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out deleted file mode 100644 index 5b7b82f23355a2..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[lo_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((supplier.s_region = 'ASIA')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter((part.p_brand <= 'MFGR#2228') and (part.p_brand >= 'MFGR#2221')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out deleted file mode 100644 index 0523fe55e5bbc9..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[lo_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[lo_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((part.p_brand = 'MFGR#2239')) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------filter((supplier.s_region = 'EUROPE')) ---------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out deleted file mode 100644 index 40096b292e84e7..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((supplier.s_region = 'ASIA')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter((customer.c_region = 'ASIA')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_year <= 1997) and (dates.d_year >= 1992)) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out deleted file mode 100644 index 7d0b454caac190..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((supplier.s_nation = 'UNITED STATES')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter((customer.c_nation = 'UNITED STATES')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_year <= 1997) and (dates.d_year >= 1992)) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out deleted file mode 100644 index 628f3df9a5831a..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter(s_city IN ('UNITED KI1', 'UNITED KI5')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter(c_city IN ('UNITED KI1', 'UNITED KI5')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_year <= 1997) and (dates.d_year >= 1992)) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out deleted file mode 100644 index f725ccdbc2c1f7..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter(s_city IN ('UNITED KI1', 'UNITED KI5')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter(c_city IN ('UNITED KI1', 'UNITED KI5')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_yearmonth = 'Dec1997')) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out deleted file mode 100644 index 63d8d12e64f165..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF3 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF2 p_partkey->[lo_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((supplier.s_region = 'AMERICA')) -----------------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter((customer.c_region = 'AMERICA')) -------------------------------PhysicalOlapScan[customer] -----------------------PhysicalProject -------------------------filter(p_mfgr IN ('MFGR#1', 'MFGR#2')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out 
deleted file mode 100644 index efc1e0061ed88d..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF3 p_partkey->[lo_partkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF2 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF1 d_datekey->[lo_orderdate] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((supplier.s_region = 'AMERICA')) -----------------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1997, 1998)) -------------------------------PhysicalOlapScan[dates] -----------------------PhysicalProject -------------------------filter((customer.c_region = 'AMERICA')) ---------------------------PhysicalOlapScan[customer] 
-------------------PhysicalProject ---------------------filter(p_mfgr IN ('MFGR#1', 'MFGR#2')) -----------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out deleted file mode 100644 index 5bd9ad4782cb3d..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF3 lo_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[lo_partkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((supplier.s_nation = 'UNITED STATES')) 
-----------------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter((part.p_category = 'MFGR#14')) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------filter(d_year IN (1997, 1998)) ---------------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out deleted file mode 100644 index 923ee0e2966746..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) 
as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject 
-----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -------------------PhysicalProject ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 item_sk->[ws_item_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 RF8 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out deleted file mode 100644 index 
64854df932af65..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ctr_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((store.s_state = 'SD')) -----------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] 
-----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out deleted file mode 100644 index 5fb40519b131d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) -------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] 
apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out deleted file mode 100644 index 8cab83d94f65ac..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and 
(t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out deleted file mode 100644 index b4c126ae67aebf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -------------------------------PhysicalOlapScan[date_dim] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out deleted file mode 100644 index 7b5d0d01ba6a17..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 
100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject 
---------------filter((date_dim.d_year = 2001)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out deleted file mode 100644 index 10192bf86cb782..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out +++ /dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) 
otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject 
-------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------------------hashJoin[LEFT_SEMI_JOIN 
broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 -----------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF15 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) 
otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() 
-------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF18 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out deleted file mode 100644 index 5825559155b8e7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out deleted file mode 100644 index c6e88456a7e402..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[cs_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF0 cs_order_number->[cs_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] 
-----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'WV')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out deleted file mode 100644 index c10cc616923d3c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF6 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_quarter_name = '2001Q1')) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) 
-----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out deleted file mode 100644 index dcda7d5d7eb3ee..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cs_bill_customer_sk] 
---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF5 -------------------------------------------PhysicalProject ---------------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1998)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out deleted file mode 100644 index 4e2627d552dd1e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------PhysicalOlapScan[date_dim] 
-------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 2)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out deleted file mode 100644 index 8fac9bc6bbbd76..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] 
hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1999)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 ---------------PhysicalProject -----------------filter((date_dim.d_year = 1998)) -------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out deleted file mode 100644 index fa360d9c6fdc83..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out deleted file mode 100644 index db506f0acaa0e9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) 
otherCondition=() -----------------------------PhysicalOlapScan[inventory] apply RFs: RF1 RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[warehouse] -------------------------PhysicalProject ---------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out deleted file mode 100644 index 7f10ebd7894ce7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject 
-------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out deleted file mode 100644 index c5d202bfee2bc3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) 
-----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 
-------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF7 ws_item_sk->[item_sk] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out deleted file mode 100644 index ace0813e2e7117..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_zip->[ca_zip];RF6 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF4 ca_address_sk->[c_current_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF6 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------PhysicalProject 
-----------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------PhysicalProject -------------------filter((store.s_market_id = 8)) ---------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out deleted file mode 100644 index 80edfc46e41f5d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = 
store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF6 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out deleted file mode 100644 index 52f628f8b600a2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[cs_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
-----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out deleted file mode 100644 index 886eca75570635..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'F') and 
(customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 
17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 
13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out deleted file mode 100644 index b09148bd528c7b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = 
store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF6 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(d_year IN (1999, 2000, 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out deleted file mode 100644 index 8beaf9b74953fb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 11)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manufact_id = 816)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out deleted file mode 100644 index 6f1b848ed00034..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[wr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] 
apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'IN')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out deleted file mode 100644 index 6c244739da9ca8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -------------------PhysicalProject ---------------------PhysicalOlapScan[customer_address] ---------------PhysicalProject -----------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject 
---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF3 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county,ca_county,ca_county,ca_county] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF7 ca_county->[ca_county,ca_county,ca_county] ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF6 ca_county->[ca_county,ca_county] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] 
---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 RF7 RF8 -----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) ---------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF7 RF8 ---------------------PhysicalProject -----------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out deleted file mode 100644 index 0416c3f39cb340..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF0 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 
'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF4 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) 
---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF8 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out deleted file mode 100644 index 15e7650bae15c3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject 
-----------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out deleted file mode 100644 index 6def6ef536b340..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out deleted file mode 100644 index 5e091a8245be48..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out deleted file mode 100644 index 2dba5f8dad05c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and 
i_manufact_id IN (1000, 707, 747, 856)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out deleted file mode 100644 index b0bda7f927c138..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] 
-----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out deleted file mode 100644 index d906073878075f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out +++ 
/dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalOlapScan[inventory] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out deleted file mode 100644 index a083e5a72ef86a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] 
-------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id,customer_id,customer_id,customer_id] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / 
year_total), NULL))) build RFs:RF6 customer_id->[customer_id] -----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF7 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF7 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out deleted file mode 100644 index 1465471817388b..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) 
-----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 'large')],AND[(item.i_category 
= 'Men'),i_color IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out deleted file mode 100644 index 8b26911e6afcaa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((item.i_manager_id = 1)) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out deleted file mode 100644 index c2cc91b7f43043..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -------------PhysicalProject ---------------PhysicalOlapScan[item] apply RFs: RF1 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject 
-----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out deleted file mode 100644 index 7270fe9092a53b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject 
-------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out deleted file mode 100644 index 37d045d1ebc4a1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Centerville', 'Fairview', 'Five 
Points', 'Liberty', 'Oak Grove')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out deleted file mode 100644 index 77dcc9357f98b8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 
2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out deleted file mode 100644 index aef568a41842bb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 
'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -------------------------PhysicalOlapScan[customer_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------filter((date_dim.d_year = 1999)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out deleted file mode 100644 index 0db3fa841189b2..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] 
-----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF0 wr_order_number->[ws_order_number];RF1 wr_item_sk->[ws_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[web_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow 
-----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF3 cr_order_number->[cs_order_number];RF4 cr_item_sk->[cs_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[catalog_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) 
---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF6 sr_ticket_number->[ss_ticket_number];RF7 sr_item_sk->[ss_item_sk] 
---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[store_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out deleted file mode 100644 index 5187455761becc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_page] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build 
RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out deleted file mode 100644 index 8bdf05df39ed98..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[sr_returned_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_ticket_number->[ss_ticket_number];RF1 sr_item_sk->[ss_item_sk];RF2 sr_customer_sk->[ss_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalWindow 
-------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out deleted file mode 100644 index 15a333bc25275d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 1)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out deleted file mode 100644 index 89dc632eb527c4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu 
packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out deleted file mode 100644 index de4dbe32cd1f80..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out +++ /dev/null @@ -1,72 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 ---------------------------------------------PhysicalProject -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject 
---------------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] -------------------------------------------------------------PhysicalUnion ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -----------------------------------------------------------------PhysicalOlapScan[item] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) 
-------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[customer] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer_address] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalAssertNumRows ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out deleted file mode 100644 index 20d097ea52f2e0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 100)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out deleted file mode 100644 index ef5a9edba623fc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) 
---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out deleted file mode 100644 index 96d8f68090e5de..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject 
-----------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category];RF8 i_brand->[i_brand];RF9 cc_name->[cc_name];RF10 rn->[(rn - 1)] -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out deleted file mode 100644 index 
5692578bb7238c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 
d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] 
-----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date = '2001-03-24')) -----------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out deleted file mode 100644 index d9b1a3ef9f8baa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] 
-----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out deleted file mode 100644 index 55144c3f93417e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF4 d_month_seq->[d_month_seq] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer_address] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalAssertNumRows -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out deleted file mode 100644 index 
403d74c71ecae1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: 
RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject 
-------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out deleted file mode 100644 index 62da8c9cb21a0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 RF10 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------------------PhysicalProject -------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------filter((store.s_gmt_offset = -7.00)) -----------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out deleted file mode 100644 index 
582c27536d2a1b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_site] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out deleted file mode 100644 index 9653f6c52199aa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 
'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out deleted file mode 100644 index ac5d0d6d739e29..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF19 i_item_sk->[cr_item_sk,cs_item_sk,sr_item_sk,ss_item_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) ---------------------------------------------PhysicalProject 
-----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = 
store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF6 RF19 -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF19 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------filter((sale > (2 * refund))) -----------------------------------------------------------------------------hashAgg[GLOBAL] -------------------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF19 ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF19 
---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[customer] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter(d_year IN (2001, 2002)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[store] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[income_band] -----------------PhysicalProject 
-------------------PhysicalOlapScan[income_band] -------------PhysicalProject ---------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -----------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out deleted file mode 100644 index c19f18d7bfa1b6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[s_store_sk,ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] apply RFs: RF4 -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject 
-----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out deleted file mode 100644 index 596cb44922b54a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF3 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF7 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out deleted file mode 100644 index 2370bce7b8f785..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out deleted file mode 100644 index d645b44a1efaa7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject 
---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out deleted file mode 100644 index e0bbeea823735d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[LEFT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() -----------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] 
apply RFs: RF5 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out deleted file mode 100644 index f47da720468166..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
-----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out deleted file mode 100644 index 9c9d7b7638d6a5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() 
-----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store] -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) ---------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out deleted file mode 100644 index 7ae1c5b71ddaf1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) ---------------------------------PhysicalOlapScan[date_dim] 
-------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out deleted file mode 100644 index cb51c1c09a0c37..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk) and (d1.d_week_seq = d2.d_week_seq)) otherCondition=((d3.d_date > cast((cast(d_date as BIGINT) + 5) as DATEV2))) build RFs:RF7 d_week_seq->[d_week_seq];RF8 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[inv_date_sk] -------------------------------PhysicalProject 
---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) -----------------------------------------------PhysicalOlapScan[inventory] apply RFs: RF4 -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF6 RF8 -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------------------PhysicalProject 
-----------------------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_buy_potential = '501-1000')) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((d1.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out deleted file mode 100644 index 8cfa2b3c64b2b7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / 
cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 'Unknown')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out deleted file mode 100644 index 6915274e1a1301..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] 
-------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.0)) 
-----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out deleted file mode 100644 index 7a6c63c2385f24..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out +++ /dev/null @@ -1,78 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) 
-----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ws_sold_date_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF6 i_brand_id->[i_brand_id];RF7 i_class_id->[i_class_id];RF8 i_category_id->[i_category_id];RF9 i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 RF8 RF9 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out deleted file mode 100644 index 2f21640b079929..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out deleted file mode 100644 index 3f4330d7466b08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] 
-----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] 
hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out deleted file mode 100644 index 8f9b721f08ee59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out deleted file mode 100644 index 40a27a12ac0692..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject 
-----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', 
'45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] 
-----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', 
'48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out deleted file mode 100644 index 3f2cb3c7fcea3a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = 
store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[cs_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_returns] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[catalog_page] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject 
-------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF11 p_promo_sk->[ws_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF10 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_site] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 
50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out deleted file mode 100644 index 8e795ffc2e0eda..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return 
as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'CA')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out deleted file mode 100644 index 7c415fed511e6e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out deleted file mode 100644 index d069a28a6b032d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out +++ /dev/null @@ -1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[sr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) 
otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[wr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', 
'2001-11-11')) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out deleted file mode 100644 index 78a1b815e9e0f9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF3 ib_income_band_sk->[hd_income_band_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[household_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) ---------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out deleted file mode 100644 index 63fc92a6f489d9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 
100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF7 ca_address_sk->[wr_refunded_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[wr_returning_cdemo_sk];RF5 cd_marital_status->[cd_marital_status];RF6 cd_education_status->[cd_education_status] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF3 cd_demo_sk->[wr_refunded_cdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF0 ws_item_sk->[wr_item_sk];RF1 ws_order_number->[wr_order_number] ---------------------------------------------PhysicalProject 
-----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 RF3 RF4 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_page] -------------------------------------PhysicalProject ---------------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -----------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 ---------------------------------PhysicalProject -----------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[reason] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out deleted file mode 100644 index 2cb80b5a081379..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out deleted file mode 100644 index 431debd06c471b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] 
---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out deleted file mode 100644 index ae9b03a84ef7de..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF22 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF21 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) 
-----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF19 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF18 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) 
-------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF16 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF15 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF13 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF12 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF10 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF7 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ss_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and 
OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -------------------------PhysicalOlapScan[time_dim] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject 
---------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out deleted file mode 100644 index 2e9294bce91fd2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = 
item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out deleted file mode 100644 index 1f880a462795bc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out deleted file mode 100644 index 6a4b369a5fe1ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF5 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cr_returning_customer_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF1 RF2 
-----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[call_center] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((hd_buy_potential like '1001-5000%')) -------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out deleted file mode 100644 index 77175ca96ff6bf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF2 r_reason_sk->[sr_reason_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF0 sr_item_sk->[ss_item_sk];RF1 sr_ticket_number->[ss_ticket_number] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF2 -------------------PhysicalProject ---------------------filter((reason.r_reason_desc = 'duplicate purchase')) -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out deleted file mode 100644 index 75a20cb4e35006..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject 
-------------------------filter((customer_address.ca_state = 'OK')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((web_site.web_company_name = 'pri')) -----------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out deleted file mode 100644 index fce09b1b6046e9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF6 web_site_sk->[ws_web_site_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[ws_ship_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_ship_date_sk] -----------------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF3 ws_order_number->[ws_order_number] -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 -------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF2 ws_order_number->[wr_order_number];RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() -------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_state = 'NC')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------filter((web_site.web_company_name = 'pri')) -------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out deleted file mode 100644 index 11217d6de3e01b..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 3)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out deleted file mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out deleted file mode 100644 index 1f92ed1e36f204..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out deleted file mode 100644 index e62313c7a3d935..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[call_center] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out deleted file mode 100644 index c9404710bfa16f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF3 ctr_store_sk->[ctr_store_sk,s_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ctr_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 RF3 -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((store.s_state = 'SD')) -----------------------PhysicalOlapScan[store] apply RFs: RF3 ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out deleted file mode 100644 index 4fdff8b37961c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = 
web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) -------------------------------PhysicalOlapScan[customer_address] 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out deleted file mode 100644 index e963fc6a8caa7a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] 
-------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( 
cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out deleted file mode 100644 index b4c126ae67aebf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject 
---------------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out deleted file mode 100644 index 2cac8d809ad124..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 
'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject 
-----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------filter((date_dim.d_year = 2001)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out deleted file mode 100644 index 966f8701126465..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out +++ /dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 i_brand_id->[i_brand_id,i_brand_id,i_brand_id];RF7 i_class_id->[i_class_id,i_class_id,i_class_id];RF8 i_category_id->[i_category_id,i_category_id,i_category_id] ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------------------PhysicalProject -----------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------------------PhysicalProject -----------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------------------PhysicalProject -----------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) 
-----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF11 i_item_sk->[ss_item_sk,ss_item_sk] ---------------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF10 ss_item_sk->[ss_item_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 RF12 -----------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF11 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) 
-----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF15 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF14 ss_item_sk->[cs_item_sk,i_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF13 i_item_sk->[cs_item_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] apply RFs: RF14 ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject 
---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF17 ss_item_sk->[i_item_sk,ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF16 i_item_sk->[ws_item_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 RF17 RF18 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] apply RFs: RF17 ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff 
--git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out deleted file mode 100644 index 81b0bae51498c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) build RFs:RF1 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) 
-----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out deleted file mode 100644 index c6e88456a7e402..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[cs_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF0 cs_order_number->[cs_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'WV')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out deleted file mode 100644 index 52da90d84ff3a8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] 
hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF6 RF9 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF5 RF7 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_quarter_name = '2001Q1')) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out deleted file mode 100644 index 22f67b07a4698c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cs_bill_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF4 RF5 
-------------------------------------------PhysicalProject ---------------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1998)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out deleted file mode 100644 index 9d4dfee81c5cc1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ss_customer_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 2)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out deleted file mode 100644 index 2be8c8135db7f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] ---------------PhysicalProject 
-----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1999)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 ---------------PhysicalProject -----------------filter((date_dim.d_year = 1998)) -------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out deleted file mode 100644 index fa360d9c6fdc83..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out deleted file mode 100644 index 6a3b7ecf26ca2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[inv_warehouse_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[warehouse] -------------------------PhysicalProject ---------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out deleted file mode 100644 index a96dc0686f150d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out deleted file mode 100644 index a65c74fa0b81dd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF4 item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 -------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF7 ws_item_sk->[item_sk] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ws_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF8 -------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out deleted file mode 100644 index 5815f02327d472..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_zip->[ca_zip];RF6 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF4 ca_address_sk->[c_current_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_ticket_number->[ss_ticket_number];RF1 sr_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF6 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------PhysicalProject -------------------filter((store.s_market_id = 8)) ---------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out deleted file mode 100644 index 10bab76c77b7f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and 
(store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF6 RF9 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF5 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out deleted file mode 100644 index edbed407b77921..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[cs_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 
'N'),(promotion.p_channel_event = 'N')]) -----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out deleted file mode 100644 index 3eec2f7437212b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -----------------------------------PhysicalProject 
-------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] 
-----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out deleted file mode 100644 index 6d0d5cb82d5160..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] 
-------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF6 RF9 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF5 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 
1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(d_year IN (1999, 2000, 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out deleted file mode 100644 index 8beaf9b74953fb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 11)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manufact_id = 816)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out 
deleted file mode 100644 index 6671347af5cc6c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[wr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[wr_returning_addr_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN 
shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'IN')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out deleted file mode 100644 index 2a86f699341a7b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ss_addr_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------PhysicalProject ---------------------PhysicalOlapScan[customer_address] ---------------PhysicalProject -----------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_bill_addr_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink 
-------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county,ca_county,ca_county,ca_county] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF7 ca_county->[ca_county,ca_county,ca_county] ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF6 ca_county->[ca_county,ca_county] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 RF7 RF8 -----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) 
-------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) ---------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF7 RF8 ---------------------PhysicalProject -----------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 
i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out deleted file mode 100644 index 0416c3f39cb340..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF0 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF4 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 
ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF8 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out deleted file mode 100644 index a19fe778a57647..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) ---------------------------------PhysicalOlapScan[store] 
-------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out deleted file mode 100644 index 83807f4b912bfe..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF5 cd_demo_sk->[c_current_cdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[c_current_addr_sk] -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) 
otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 
d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out deleted file mode 100644 index 08593e2e439b92..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF0 
i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out deleted file mode 100644 index 2dba5f8dad05c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and i_manufact_id IN (1000, 707, 747, 856)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out deleted file mode 100644 index 2bc9d9fecbca40..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ws_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out deleted file mode 100644 index 90c507f9c536f5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[inv_warehouse_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out deleted file mode 100644 index 9a590246f64a4a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk,ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF3 -------------------------PhysicalProject 
---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id,customer_id,customer_id,customer_id] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as 
DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] -----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF7 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF7 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out deleted file mode 100644 index aae0d788557045..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[cs_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 
0.99)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units 
IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out deleted file mode 100644 index 8b26911e6afcaa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((item.i_manager_id = 1)) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out deleted file mode 100644 index c2cc91b7f43043..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -------------PhysicalProject ---------------PhysicalOlapScan[item] apply RFs: RF1 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject 
-----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out deleted file mode 100644 index 68d1ef7855a7fd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] 
-----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ws_bill_customer_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out deleted file mode 100644 index 4f754d410d4b6d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[ss_customer_sk] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) 
-----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Centerville', 'Fairview', 'Five Points', 'Liberty', 'Oak Grove')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF5 -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out deleted file mode 100644 index 29aa8e6ae22e6c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] 
---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() build RFs:RF8 i_category->[i_category,i_category];RF9 i_brand->[i_brand,i_brand];RF10 s_store_name->[s_store_name,s_store_name];RF11 s_company_name->[s_company_name,s_company_name];RF12 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 RF11 RF12 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 RF9 RF10 RF11 RF12 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out deleted file mode 100644 index d994b40da36a4a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- 
This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 
200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -------------------------PhysicalOlapScan[customer_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------filter((date_dim.d_year = 1999)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out deleted file mode 100644 index 0db3fa841189b2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] 
hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF0 wr_order_number->[ws_order_number];RF1 wr_item_sk->[ws_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[web_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF3 cr_order_number->[cs_order_number];RF4 cr_item_sk->[cs_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[catalog_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF6 sr_ticket_number->[ss_ticket_number];RF7 sr_item_sk->[ss_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and 
(sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[store_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out deleted file mode 100644 index fde3b7f2fa20c8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk,ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject 
-----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF2 cp_catalog_page_sk->[cr_catalog_page_sk,cs_catalog_page_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_page] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF7 web_site_sk->[ws_web_site_sk,ws_web_site_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out deleted file mode 100644 index e7941ce875f02f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[sr_returned_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_ticket_number->[ss_ticket_number];RF1 sr_item_sk->[ss_item_sk];RF2 sr_customer_sk->[ss_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------PhysicalOlapScan[date_dim] - 
diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] 
---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out deleted file mode 100644 index 15a333bc25275d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 1)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out deleted file mode 100644 index 04920e65ac6894..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 
'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out deleted file mode 100644 index 96d57c63e6f62b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out +++ /dev/null @@ -1,72 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF5 s_county->[ca_county];RF6 s_state->[ca_state] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[c_current_addr_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF7 ---------------------------------------------PhysicalProject 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[cs_bill_customer_sk,ws_bill_customer_sk] -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] -------------------------------------------------------------PhysicalUnion ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -----------------------------------------------------------------PhysicalOlapScan[item] 
---------------------------------------------------------PhysicalProject -----------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalAssertNumRows ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out deleted file mode 100644 index 20d097ea52f2e0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 100)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out deleted file mode 100644 index ef5a9edba623fc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) 
---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out deleted file mode 100644 index 88777bc1ff548d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject 
---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category];RF8 i_brand->[i_brand];RF9 cc_name->[cc_name];RF10 rn->[(rn - 1)] -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out deleted file mode 100644 index 8664d84096bc44..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] 
-------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject 
---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date = '2001-03-24')) -----------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out deleted file mode 100644 index ed22b4adefa2b4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF4 s_store_id1->[s_store_id];RF5 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] apply RFs: RF4 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] 
-------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF5 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out deleted file mode 100644 index 5f53b66408a500..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF5 i_category->[i_category] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF4 d_month_seq->[d_month_seq] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 
i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer_address] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF5 ---------------------------PhysicalAssertNumRows -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out deleted file mode 100644 index 403d74c71ecae1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject 
---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject 
---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = 
-7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out deleted file mode 100644 index 62da8c9cb21a0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 RF10 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------------------PhysicalProject 
-------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------filter((store.s_gmt_offset = -7.00)) -----------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out deleted file mode 100644 index 1f0de64db91bb5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF1 sm_ship_mode_sk->[ws_ship_mode_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[ws_warehouse_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_site] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out deleted file mode 
100644 index d4fb4990da98b8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN 
('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out deleted file mode 100644 index b7cf8115b6db75..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF19 i_item_sk->[cr_item_sk,cs_item_sk,sr_item_sk,ss_item_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF18 ib_income_band_sk->[hd_income_band_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF17 ib_income_band_sk->[hd_income_band_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF16 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF15 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF14 hd_demo_sk->[c_current_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF12 
p_promo_sk->[ss_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF11 cd_demo_sk->[c_current_cdemo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF10 cd_demo_sk->[ss_cdemo_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[c_first_shipto_date_sk] -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[c_first_sales_date_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF7 s_store_sk->[ss_store_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() 
build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_item_sk->[ss_item_sk];RF3 sr_ticket_number->[ss_ticket_number] -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF4 RF5 RF6 RF7 RF10 RF12 RF13 RF15 RF19 -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF19 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------filter((sale > (2 * refund))) -----------------------------------------------------------------------------hashAgg[GLOBAL] -------------------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] 
hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_item_sk->[cs_item_sk];RF1 cr_order_number->[cs_order_number] ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF19 ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF19 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF8 RF9 RF11 RF14 RF16 -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter(d_year IN (2001, 2002)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[store] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[customer_demographics] 
-----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF17 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF18 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[income_band] -----------------PhysicalProject -------------------PhysicalOlapScan[income_band] -------------PhysicalProject ---------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -----------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out deleted file mode 100644 index 
4dd67a91e1e98e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[s_store_sk,ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] apply RFs: RF4 -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out deleted file mode 100644 index 7ef36371976ad6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF3 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[ws_warehouse_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) 
-------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF7 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF4 w_warehouse_sk->[cs_warehouse_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] 
---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out deleted file mode 100644 index e4a703c25ac818..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = 
store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out deleted file mode 100644 index 82e85b4fc4697a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[ss_customer_sk] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) 
-----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF5 -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out deleted file mode 100644 index af6d7e8c85a5f6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[LEFT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -----------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out deleted file mode 100644 index 2b6615e0b93b84..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 
'N')]) -----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out deleted file mode 100644 index 866c026a90dd67..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((store.s_state = tmp1.s_state)) 
otherCondition=() build RFs:RF2 s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) ---------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out deleted file mode 100644 index 7ae1c5b71ddaf1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) ---------------------------------PhysicalOlapScan[date_dim] 
-------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out deleted file mode 100644 index 27ea6a31a49020..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk) and (d1.d_week_seq = d2.d_week_seq)) otherCondition=((d3.d_date > cast((cast(d_date as BIGINT) + 5) as DATEV2))) build RFs:RF7 d_week_seq->[d_week_seq];RF8 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[inv_date_sk] 
-------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk,inv_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[inv_warehouse_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF0 cs_item_sk->[inv_item_sk] -----------------------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 RF4 -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF5 RF6 RF8 
-------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_buy_potential = '501-1000')) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((d1.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out deleted file mode 100644 index a7655a2c3f268a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and 
(if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 'Unknown')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out deleted file mode 100644 index c32a9187e34e92..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject 
---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.0)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out deleted file mode 100644 index 7a6c63c2385f24..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out +++ /dev/null @@ -1,78 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF6 i_brand_id->[i_brand_id];RF7 i_class_id->[i_class_id];RF8 i_category_id->[i_category_id];RF9 i_manufact_id->[i_manufact_id] 
---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 RF8 RF9 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out deleted file mode 100644 index 8f739a1d12b35a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = 
item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out deleted file mode 100644 index cdecac9706c07d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject 
---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) 
---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF7 wp_web_page_sk->[wr_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out deleted file mode 100644 index 1a19308d991441..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) 
-------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out deleted file mode 100644 index 5047f4a7878cc1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 
---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', 
'21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', 
'68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', 
'25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', 
'71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out deleted file mode 100644 index e980bad3f49872..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply 
RFs: RF0 RF1 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[cs_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF4 cp_catalog_page_sk->[cs_catalog_page_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_returns] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[catalog_page] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF11 p_promo_sk->[ws_promo_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF9 web_site_sk->[ws_web_site_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 RF10 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_site] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject 
-----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out deleted file mode 100644 index 8006799a3cc5fd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cr_returning_addr_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 
ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'CA')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out deleted file mode 100644 index 7c415fed511e6e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out deleted file mode 100644 index eb9901a16a7b86..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out +++ /dev/null @@ -1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[sr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) 
otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[wr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[wr_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalProject 
-----------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out deleted file mode 100644 index b6aab0b1439f62..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF3 ib_income_band_sk->[hd_income_band_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[household_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) ---------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out deleted file mode 100644 index 62c4b147f59a22..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() build RFs:RF9 r_reason_sk->[wr_reason_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF7 ca_address_sk->[wr_refunded_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[wr_returning_cdemo_sk];RF5 cd_marital_status->[cd_marital_status];RF6 cd_education_status->[cd_education_status] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr 
Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF3 cd_demo_sk->[wr_refunded_cdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF0 ws_item_sk->[wr_item_sk];RF1 ws_order_number->[wr_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 RF3 RF4 RF7 RF9 ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF8 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_page] -------------------------------------PhysicalProject ---------------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and 
cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -----------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 ---------------------------------PhysicalProject -----------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out deleted file mode 100644 index 13c2b5c88bc677..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out deleted file mode 100644 index a71098a021052f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ws_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out deleted file mode 100644 index ae9b03a84ef7de..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF22 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF21 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) 
-----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF19 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF18 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) 
-------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF16 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF15 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF13 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF12 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF10 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF7 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ss_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and 
OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -------------------------PhysicalOlapScan[time_dim] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject 
---------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out deleted file mode 100644 index e4d2ae3435f174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out deleted file mode 100644 index 1f880a462795bc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out deleted file mode 100644 index a2e5d4a7660114..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF5 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cr_returning_customer_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF0 cc_call_center_sk->[cr_call_center_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: 
RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[call_center] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((hd_buy_potential like '1001-5000%')) -------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out deleted file mode 100644 index 77175ca96ff6bf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF2 r_reason_sk->[sr_reason_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF0 sr_item_sk->[ss_item_sk];RF1 sr_ticket_number->[ss_ticket_number] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF2 -------------------PhysicalProject ---------------------filter((reason.r_reason_desc = 'duplicate purchase')) -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out deleted file mode 100644 index 75a20cb4e35006..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject 
-------------------------filter((customer_address.ca_state = 'OK')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((web_site.web_company_name = 'pri')) -----------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out deleted file mode 100644 index aaaba243279489..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number];RF1 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF14 RF15 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF14 RF15 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF12 web_site_sk->[ws_web_site_sk];RF13 web_site_sk->[ws_web_site_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF10 ca_address_sk->[ws_ship_addr_sk];RF11 ca_address_sk->[ws_ship_addr_sk] 
-------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_ship_date_sk];RF9 d_date_sk->[ws_ship_date_sk] -----------------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[ws_order_number];RF7 ws_order_number->[ws_order_number] -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 -------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF4 ws_order_number->[wr_order_number];RF5 ws_order_number->[wr_order_number];RF14 ws_order_number->[ws_order_number,ws_order_number];RF15 ws_order_number->[ws_order_number,ws_order_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF2 wr_order_number->[ws_order_number];RF3 wr_order_number->[ws_order_number] -------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 RF10 RF11 RF12 RF13 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_state = 'NC')) -----------------------------PhysicalOlapScan[customer_address] 
---------------------PhysicalProject -----------------------filter((web_site.web_company_name = 'pri')) -------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out deleted file mode 100644 index 11217d6de3e01b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 3)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out deleted file mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out deleted file mode 100644 index 1f92ed1e36f204..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out deleted file mode 100644 index addda24e68b119..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF1 sm_ship_mode_sk->[cs_ship_mode_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[cs_warehouse_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[call_center] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out deleted file mode 100644 index 9ee4e3a3195cb8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ctr_customer_sk->[c_customer_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[customer] apply RFs: RF3 ---------------PhysicalProject 
-----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((store.s_state = 'SD')) ---------------------------PhysicalOlapScan[store] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out deleted file mode 100644 index 4dfc2de4cf3fe5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) 
otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out deleted file mode 100644 index e7ae73f8e00980..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), 
(cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out deleted file mode 100644 index be61da2020ee40..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out deleted file mode 100644 index 55cadf71bf5d45..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF3 ss_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) ---------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF0 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2001)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out deleted file mode 100644 index 2a29746e37ef07..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out +++ 
/dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 brand_id->[i_brand_id];RF7 class_id->[i_class_id];RF8 category_id->[i_category_id] ---------PhysicalProject -----------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] 
-------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -------------------------PhysicalProject ---------------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 -----------------------------------------------PhysicalProject 
-------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF13 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 -------------------------------------------------PhysicalProject 
---------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy 
= 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out deleted file mode 100644 index c070b7d34c00d4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -----------------------PhysicalProject 
-------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out deleted file mode 100644 index 1733e793f1f9b0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF3 cs_order_number->[cs_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cs_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'WV')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) ---------------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out deleted file mode 100644 index 5342955a97aae2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_quarter_name = '2001Q1')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out deleted file mode 100644 index 57183675eb5fc2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[i_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF1 
c_current_cdemo_sk->[cd_demo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -------------------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out deleted file mode 100644 index 722f7cfee673fc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 2)) 
-----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out deleted file mode 100644 index 5888221893d260..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] 
-------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1999)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out deleted file mode 100644 index 16785cbee81da3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out deleted file mode 100644 index 991b448adf9f0c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out deleted file mode 100644 index 7f10ebd7894ce7..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out deleted file mode 100644 index 6103f4eb74d6ab..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = 
frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 ws_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out deleted file mode 100644 index 3d2e80e2381a00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 8)) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------------PhysicalProject 
-------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out deleted file mode 100644 index cbc48e3165ade9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -------------------------------PhysicalProject 
---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out deleted file mode 100644 index 37ede3b355320f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[cs_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out deleted file mode 100644 index 9b311ff91423bf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF3 -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender 
= 'F') and (customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 
17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 
13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out deleted file mode 100644 index 4e04f8042e2e31..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 sr_customer_sk->[cs_bill_customer_sk];RF8 sr_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] 
---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 ---------------------------------------PhysicalProject -----------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------filter(d_year IN (1999, 2000, 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out deleted file mode 100644 index 4092c73d09fd5b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manufact_id = 816)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 11)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out deleted file mode 100644 index 99f54520a25e12..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[wr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply 
RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'IN')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out deleted file mode 100644 index eb49a9cb10b936..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[customer_address] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 ---------------------PhysicalProject -----------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] -----PhysicalResultSink -------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county] -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF7 ca_county->[ca_county] ---------------------PhysicalProject -----------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), 
(cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF6 ca_county->[ca_county,ca_county,ca_county] -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 ---------------------------PhysicalProject -----------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF6 -------------------------PhysicalProject ---------------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -----------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out deleted file mode 100644 index 3cc7c048f5784a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF3 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF7 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] apply RFs: RF7 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF11 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ws_item_sk->[i_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF10 RF11 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] 
---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out deleted file mode 100644 index 79c46ccb77bbb8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 
---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out deleted file mode 100644 index 1e865046f6cf27..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF5 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out deleted file mode 100644 index 78618ea60dfcc0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((d1.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out deleted file mode 100644 index cc63716e4ba212..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and i_manufact_id IN (1000, 707, 747, 856)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) 
---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out deleted file mode 100644 index 26d52e9fae9c40..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out deleted file mode 100644 index 899b1a5e0bdd99..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out deleted file mode 100644 index 980ceef87cedc2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] 
-----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id] -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total 
> 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] -----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out deleted file mode 100644 index ade38048fb9732..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) -------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject 
---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color 
IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out deleted file mode 100644 index 2ca590a9d0d4d1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((item.i_manager_id = 1)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out deleted file mode 100644 index 86d157354860a4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out deleted file mode 100644 index 40b25ae51ad929..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF3 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) 
---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out deleted file mode 100644 index 75c9af2b354fae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (ca_city = bought_city))) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter(s_city IN ('Centerville', 'Fairview', 
'Five Points', 'Liberty', 'Oak Grove')) -----------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------PhysicalOlapScan[customer] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out deleted file mode 100644 index 048f93392595ec..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 
d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and 
(v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out deleted file mode 100644 index ad78a4f21c50ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 
'W'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 1999)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out deleted file mode 100644 index 7d0f6b1ce22d0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF1 ws_order_number->[wr_order_number];RF2 ws_item_sk->[wr_item_sk] -----------------------------------------------------------PhysicalProject 
-------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF4 cs_order_number->[cr_order_number];RF5 cs_item_sk->[cr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF7 ss_ticket_number->[sr_ticket_number];RF8 ss_item_sk->[sr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF7 RF8 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out deleted file mode 100644 index 32ac590f71d004..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() 
build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out deleted file mode 100644 index 2f0a1b10cbff1b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF1 sr_ticket_number->[ss_ticket_number];RF2 sr_item_sk->[ss_item_sk];RF3 sr_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalWindow 
-------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out deleted file mode 100644 index 1eff8fc3ba89c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out deleted file mode 100644 index 89dc632eb527c4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu 
packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out deleted file mode 100644 index ca44d791dc42aa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 
c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply 
RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] 
-----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out deleted file mode 100644 index e24470e9606c8b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 100)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out deleted file mode 100644 index 97c4f27b14edb6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 
i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 cs_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN 
broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out deleted file mode 100644 index 4f23fac89cf958..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out deleted file mode 100644 index 8b1c60c8c19a1b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] 
hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out deleted file mode 100644 index dacb8971d1507e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52)) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF5 s_store_id2->[s_store_id] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq2)) 
otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out deleted file mode 100644 index ddb58c3887ed6f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 
i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 ---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject 
-------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out deleted file mode 100644 index f3678f64aece45..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ss_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Children')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] 
hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ca_address_sk->[ws_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build 
RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out deleted file mode 100644 index faf30604b86926..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF9 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF8 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF8 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF6 ss_promo_sk->[p_promo_sk] -----------------------------------PhysicalProject -------------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) ---------------------------------------PhysicalOlapScan[promotion] apply RFs: RF6 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 ---------------------------------------PhysicalProject -----------------------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] 
---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF4 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------PhysicalProject 
-------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out deleted file mode 100644 index c23bff2b443621..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out deleted file mode 100644 index 9653f6c52199aa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out deleted file mode 100644 index 6aa36d179b1db1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF17 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF17 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] -----------------------------------------PhysicalProject 
-------------------------------------------PhysicalOlapScan[income_band] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF11 ss_item_sk->[sr_item_sk];RF12 ss_ticket_number->[sr_ticket_number] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF10 ss_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF9 ss_cdemo_sk->[cd_demo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF9 -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[cr_item_sk,cs_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() -------------------------------------------PhysicalProject 
---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF3 cs_item_sk->[ss_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF8 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter(d_year IN (2001, 2002)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sale > (2 * refund))) ---------------------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF8 -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[income_band] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------PhysicalProject -----------------------------------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -------------------------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] 
-------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out deleted file mode 100644 index b3f3cfc9bbf481..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() 
build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out deleted file mode 100644 index 6017253a3383c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF0 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF4 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out deleted file mode 100644 index d043d5f25b6c7a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalPartitionTopN ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out 
deleted file mode 100644 index aa07d1b2a42d9b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out deleted file mode 100644 index a68ff0c1138094..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_ship_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF2 c_current_cdemo_sk->[cd_demo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out deleted file mode 100644 index 18425373b08452..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject 
---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out deleted file mode 100644 index ae5b26647980e7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() build RFs:RF2 
s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out deleted file mode 100644 index 3010f0b574e03b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_manager_id = 1)) ---------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out deleted file mode 100644 index 0964cf6998d4ec..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF6 d_date_sk->[inv_date_sk];RF7 cs_item_sk->[inv_item_sk] 
-------------------------------PhysicalOlapScan[inventory] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=((d3.d_date > days_add(d_date, INTERVAL 5 DAY))) -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[cs_bill_hdemo_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((household_demographics.hd_buy_potential = '501-1000')) 
-----------------------------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((d1.d_year = 2002)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_returns] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out deleted file mode 100644 index bfc42f79bbc570..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 
'Unknown')) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out deleted file mode 100644 index c3687dadd21872..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 
d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and 
(t_w_firstyear.year_total > 0.0)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out deleted file mode 100644 index 921d754e533285..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out +++ /dev/null @@ -1,73 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 
---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF6 ss_ticket_number->[sr_ticket_number];RF7 ss_item_sk->[sr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = 
web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF10 ws_order_number->[wr_order_number];RF11 ws_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_returns] apply RFs: RF10 RF11 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF12 i_brand_id->[i_brand_id];RF13 i_class_id->[i_class_id];RF14 i_category_id->[i_category_id];RF15 i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( 
cteId=CTEId#0 ) apply RFs: RF12 RF13 RF14 RF15 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out deleted file mode 100644 index 668c3625c56841..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 ss_sold_date_sk->[d_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] apply RFs: RF3 -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 ss_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() 
build RFs:RF1 ws_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 cs_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out deleted file mode 100644 index 3f4330d7466b08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject 
---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out deleted file mode 100644 index f57418546e7fb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject 
-------------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) -----------------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out deleted file mode 100644 index fd4ec64bb81546..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) 
-------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', 
'24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', 
'71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', 
'27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', 
'77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out deleted file mode 100644 index 5afd260e3e1817..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF4 ss_item_sk->[sr_item_sk];RF5 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 
d_date_sk->[ss_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF9 cs_item_sk->[cr_item_sk];RF10 cs_order_number->[cr_order_number] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF9 RF10 -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF8 p_promo_sk->[cs_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 RF8 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN 
colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF16 ws_item_sk->[wr_item_sk];RF17 ws_order_number->[wr_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF16 RF17 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF14 p_promo_sk->[ws_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF13 i_item_sk->[ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF12 RF13 RF14 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] 
-----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out deleted file mode 100644 index fdc3edc4efb31f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) 
otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'CA')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out deleted file mode 100644 index 5142d25b09e8a3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out deleted file mode 100644 index c6d7033d8cecea..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out +++ 
/dev/null @@ -1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 cr_item_sk->[i_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
-------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[sr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[sr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 wr_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[wr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out deleted file mode 100644 index 82e9098dbdcbb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF3 c_current_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------------PhysicalProject 
---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) -----------------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out deleted file mode 100644 index 1fed378a9afc1d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF5 wr_returning_cdemo_sk->[cd_demo_sk];RF6 cd_marital_status->[cd_marital_status];RF7 cd_education_status->[cd_education_status] -----------------------------PhysicalProject -------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF4 
wr_refunded_cdemo_sk->[cd_demo_sk] ---------------------------------PhysicalProject -----------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[wr_refunded_addr_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF1 ws_item_sk->[wr_item_sk];RF2 ws_order_number->[wr_order_number] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 
300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_year = 2000)) -------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) -----------------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[reason] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out deleted file mode 100644 index c68c8b30e9e929..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out deleted file mode 100644 index 9bfa302e37c8cf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) 
-------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out deleted file mode 100644 index a619efe9def125..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF22 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF21 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) 
-----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF19 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF18 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject 
-----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF16 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF15 t_time_sk->[ss_sold_time_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) -------------------------------------PhysicalOlapScan[time_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF12 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF10 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF9 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF7 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject 
-------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject 
---------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 
4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out deleted file mode 100644 index 2e9294bce91fd2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out deleted file mode 100644 index 13607b4ae13f5d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out deleted file mode 100644 index 6593bb83f07994..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like '1001-5000%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out deleted file mode 100644 index 45f02ddf38ee38..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF0 r_reason_sk->[sr_reason_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((reason.r_reason_desc = 'duplicate purchase')) ---------------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out deleted file mode 100644 index 6a25137bf51fcf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF3 ws_order_number->[ws_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ws_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'OK')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= 
'2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((web_site.web_company_name = 'pri')) ---------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out deleted file mode 100644 index ad87360cc8a6ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 
-------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'NC')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out deleted file mode 100644 index c50ffa373c8150..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_dep_count = 3)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out deleted file mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out deleted file mode 100644 index d1a4251b785e74..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out deleted file mode 100644 index 6dbec861eaffd3..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out 
deleted file mode 100644 index 4569832b6d5aa6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ctr_customer_sk->[c_customer_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[customer] apply RFs: RF3 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF2 ctr_store_sk->[ctr_store_sk,s_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject 
-------------------------filter((store.s_state = 'SD')) ---------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out deleted file mode 100644 index 4dfc2de4cf3fe5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = 
web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] 
-----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out deleted file mode 100644 index 8abb7de87e97f9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 
0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out deleted file mode 100644 index be61da2020ee40..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject 
-----------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out deleted file mode 100644 index dc2239ca3f9701..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF3 ss_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 
'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) ---------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF0 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2001)) 
-----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out deleted file mode 100644 index 196a98b5a2f51d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out +++ /dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 brand_id->[i_brand_id];RF7 class_id->[i_class_id];RF8 category_id->[i_category_id] ---------PhysicalProject -----------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) 
-----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -------------------------PhysicalProject ---------------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( 
cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[ss_item_sk,ss_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF11 ss_item_sk->[ss_item_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 RF12 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] 
hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF15 ss_item_sk->[cs_item_sk,i_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF14 i_item_sk->[cs_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF13 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] apply RFs: RF15 -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF18 ss_item_sk->[i_item_sk,ws_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF17 i_item_sk->[ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 RF17 RF18 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] apply RFs: RF18 -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out deleted file mode 100644 index b93c82158d296a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) build RFs:RF2 c_customer_sk->[cs_bill_customer_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out deleted file mode 100644 index 1733e793f1f9b0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF3 cs_order_number->[cs_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cs_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'WV')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) ---------------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out deleted file mode 100644 index 7cc4a196c206c3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_quarter_name = '2001Q1')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 -------------------------------------PhysicalProject ---------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) 
-----------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out deleted file mode 100644 index 57183675eb5fc2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[i_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF2 
cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -------------------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out deleted file mode 100644 index c0852398e70d38..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out +++ /dev/null @@ -1,35 
+0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF4 
-------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 2)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out deleted file mode 100644 index 988b288ebb81d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) 
otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1999)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out deleted file mode 100644 index 16785cbee81da3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out deleted file mode 100644 index e80000c6353128..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF2 w_warehouse_sk->[inv_warehouse_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out deleted file mode 100644 index a96dc0686f150d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out deleted file mode 100644 index 0d8b21c19639e9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN 
shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 ws_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out deleted file mode 100644 index 0dc2c851744de2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_ticket_number->[ss_ticket_number];RF6 sr_item_sk->[i_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ca_zip->[s_zip];RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF3 RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 8)) ---------------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] apply RFs: RF6 -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out deleted file mode 100644 index e4e15d711aeb70..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 
sr_ticket_number->[ss_ticket_number] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -----------------------------------PhysicalProject -------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 -----------------------------------PhysicalProject -------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out deleted file mode 100644 index 0c3a3f432aeb48..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[cs_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
---------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out deleted file mode 100644 index c6137e774ae1b9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -----------------------------------PhysicalProject 
-------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] 
-----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out deleted file mode 100644 index 0ad4d176c10d08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 sr_customer_sk->[cs_bill_customer_sk];RF8 sr_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ---------------------------------------PhysicalProject -----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------filter(d_year IN (1999, 2000, 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out deleted file mode 100644 index 4092c73d09fd5b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manufact_id = 816)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 11)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out deleted file mode 100644 index 
1fd2b5a1688c12..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[wr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[wr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'IN')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out deleted file mode 100644 index c768d674841b33..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------PhysicalProject ---------------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[customer_address] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ws_bill_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------PhysicalProject -----------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] -----PhysicalResultSink 
-------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county] -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF7 ca_county->[ca_county] ---------------------PhysicalProject -----------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF6 ca_county->[ca_county,ca_county,ca_county] -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 
-----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 ---------------------------PhysicalProject -----------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF6 -------------------------PhysicalProject ---------------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -----------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out deleted file mode 100644 index 3cc7c048f5784a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF3 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 
ca_address_sk->[ss_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF7 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF7 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF11 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ws_item_sk->[i_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF10 RF11 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out deleted file mode 100644 index 79c46ccb77bbb8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and 
OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out deleted file mode 100644 index dc926eb4b522f0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF5 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------------PhysicalProject 
---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] 
apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out deleted file mode 100644 index 92f5563f1a38b5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: 
RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((d1.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out deleted file mode 100644 index cc63716e4ba212..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject 
-----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and i_manufact_id IN (1000, 707, 747, 856)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out deleted file mode 100644 index ef2051eb2d000c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ws_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out deleted file mode 100644 index b7ca740e55c672..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF2 w_warehouse_sk->[inv_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -------------------------------PhysicalOlapScan[date_dim] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out deleted file mode 100644 index 709da33d851bff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk,ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id] -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] 
-----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out deleted file mode 100644 index 5ff27658e2ed3f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF4 w_warehouse_sk->[cs_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF4 -------------------------------PhysicalProject ---------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) -------------------------------PhysicalOlapScan[date_dim] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 
'large')],AND[(item.i_category = 'Men'),i_color IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out deleted file mode 100644 index 2ca590a9d0d4d1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((item.i_manager_id = 1)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out deleted file mode 100644 index 86d157354860a4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out deleted file mode 100644 index 6bbc52ecb1a343..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ws_bill_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out deleted file mode 100644 index c1ebbb551053cb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[ss_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] 
-------------------------PhysicalProject ---------------------------filter(s_city IN ('Centerville', 'Fairview', 'Five Points', 'Liberty', 'Oak Grove')) -----------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out deleted file mode 100644 index ecc4777c22f664..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] 
---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() build RFs:RF8 i_category->[i_category,i_category];RF9 i_brand->[i_brand,i_brand];RF10 s_store_name->[s_store_name,s_store_name];RF11 s_company_name->[s_company_name,s_company_name];RF12 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 RF11 RF12 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 RF9 RF10 RF11 RF12 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out deleted file mode 100644 index e7bd6986059878..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 200.00) and 
(store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 1999)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out deleted file mode 100644 index 7d0f6b1ce22d0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF1 ws_order_number->[wr_order_number];RF2 ws_item_sk->[wr_item_sk] -----------------------------------------------------------PhysicalProject 
-------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF4 cs_order_number->[cr_order_number];RF5 cs_item_sk->[cr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF7 ss_ticket_number->[sr_ticket_number];RF8 ss_item_sk->[sr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF7 RF8 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out deleted file mode 100644 index 917f29d09727cf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk,ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject 
-----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF3 cp_catalog_page_sk->[cr_catalog_page_sk,cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF7 web_site_sk->[ws_web_site_sk,ws_web_site_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out deleted file mode 100644 index f5c3f38463d42c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF1 sr_ticket_number->[ss_ticket_number];RF2 sr_item_sk->[ss_item_sk];RF3 sr_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - 
diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] 
---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out deleted file mode 100644 index 1eff8fc3ba89c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out deleted file mode 100644 index 04920e65ac6894..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 
'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out deleted file mode 100644 index c2d65d63990fae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject 
---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows 
-------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out deleted file mode 100644 index e24470e9606c8b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 100)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) 
-------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out deleted file mode 100644 index 97c4f27b14edb6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 cs_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 -------------------------------------PhysicalProject 
---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy 
= 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out deleted file mode 100644 index 2cab4f33e1358d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category,i_category];RF8 i_brand->[i_brand,i_brand];RF9 cc_name->[cc_name,cc_name];RF10 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 
---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out deleted file mode 100644 index 62cf69cc400980..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] 
hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as 
DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out deleted file mode 100644 index 494750bddc66ee..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52)) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF5 s_store_id2->[s_store_id] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -----------------------PhysicalProject 
-------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out deleted file mode 100644 index 43c8732d7f8553..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 
---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF0 i_category->[i_category] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out deleted file mode 100644 index f3678f64aece45..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ss_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Children')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ca_address_sk->[ws_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out deleted file mode 100644 index faf30604b86926..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF8 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF8 ---------------------------PhysicalProject 
-----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF6 ss_promo_sk->[p_promo_sk] -----------------------------------PhysicalProject -------------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) ---------------------------------------PhysicalOlapScan[promotion] apply RFs: RF6 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 ---------------------------------------PhysicalProject -----------------------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF4 -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out deleted file mode 100644 index 928dd1dc5d5a9a..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[ws_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[ws_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_site] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out deleted file mode 100644 index d4fb4990da98b8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject 
-----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out deleted file mode 100644 index 2ef8d6701ee4c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF19 d_date_sk->[c_first_shipto_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[c_first_sales_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF17 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF16 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF15 cd_demo_sk->[c_current_cdemo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF14 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF14 RF15 RF16 RF17 RF18 RF19 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF13 ib_income_band_sk->[hd_income_band_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF13 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[income_band] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF11 ss_item_sk->[sr_item_sk];RF12 ss_ticket_number->[sr_ticket_number] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF10 ss_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF9 ss_cdemo_sk->[cd_demo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF9 -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) 
otherCondition=() build RFs:RF8 i_item_sk->[cr_item_sk,cs_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF5 ib_income_band_sk->[hd_income_band_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF3 cs_item_sk->[ss_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF4 RF6 RF7 RF8 ---------------------------------------------------------------PhysicalProject 
-----------------------------------------------------------------filter(d_year IN (2001, 2002)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sale > (2 * refund))) ---------------------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_item_sk->[cs_item_sk];RF1 cr_order_number->[cs_order_number] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF8 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF8 -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF5 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[income_band] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] 
-------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------PhysicalProject -----------------------------------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -------------------------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out deleted file mode 100644 index 1664203e1172f5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk,ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF4 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out deleted file mode 100644 index a4ff984e4cdc43..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF3 w_warehouse_sk->[ws_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF0 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject 
-----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF7 w_warehouse_sk->[cs_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF4 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) 
-----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out deleted file mode 100644 index 09aed607328af2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalPartitionTopN ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 
i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out deleted file mode 100644 index aa07d1b2a42d9b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out deleted file mode 100644 index a68ff0c1138094..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_ship_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF2 c_current_cdemo_sk->[cd_demo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out deleted file mode 100644 index 49acfd90e56654..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out deleted file mode 100644 index b1074dc6ffeed5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() build 
RFs:RF2 s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out deleted file mode 100644 index 3010f0b574e03b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_manager_id = 1)) ---------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out deleted file mode 100644 index e0d8acf21bdec5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF8 w_warehouse_sk->[inv_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF6 d_date_sk->[inv_date_sk];RF7 
cs_item_sk->[inv_item_sk] -------------------------------PhysicalOlapScan[inventory] apply RFs: RF6 RF7 RF8 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF5 d_week_seq->[d_week_seq] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[cs_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=((d3.d_date > days_add(d_date, INTERVAL 5 DAY))) build RFs:RF3 d_date_sk->[cs_ship_date_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[cs_bill_hdemo_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------------------------------PhysicalProject 
---------------------------------------------------------filter((household_demographics.hd_buy_potential = '501-1000')) -----------------------------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((d1.d_year = 2002)) -------------------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -----------------------------------------------PhysicalProject -------------------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_returns] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out deleted file mode 100644 index bfc42f79bbc570..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 
'Unknown')) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out deleted file mode 100644 index 421b74396da876..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject 
---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.0)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out deleted file mode 100644 index 921d754e533285..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out +++ /dev/null @@ -1,73 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF6 ss_ticket_number->[sr_ticket_number];RF7 ss_item_sk->[sr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject 
-----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF10 ws_order_number->[wr_order_number];RF11 ws_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_returns] apply RFs: RF10 RF11 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF12 i_brand_id->[i_brand_id];RF13 i_class_id->[i_class_id];RF14 i_category_id->[i_category_id];RF15 
i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 RF13 RF14 RF15 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out deleted file mode 100644 index 668c3625c56841..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 ss_sold_date_sk->[d_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] apply RFs: RF3 -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 ss_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject 
---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 ws_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 cs_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out deleted file mode 100644 index cdecac9706c07d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject 
---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) 
---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF7 wp_web_page_sk->[wr_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out deleted file mode 100644 index 974fb1da39b734..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) -----------------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out deleted file mode 100644 index a254eeae049f91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 
2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', 
'24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', 
'71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', 
'27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', 
'75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out deleted file mode 100644 index b64c3639ecbb6d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF4 ss_item_sk->[sr_item_sk];RF5 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) 
otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF11 cp_catalog_page_sk->[cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF9 cs_item_sk->[cr_item_sk];RF10 cs_order_number->[cr_order_number] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply 
RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF8 p_promo_sk->[cs_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 RF8 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] 
-----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF16 ws_item_sk->[wr_item_sk];RF17 ws_order_number->[wr_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF16 RF17 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF15 web_site_sk->[ws_web_site_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF14 p_promo_sk->[ws_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF13 i_item_sk->[ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF12 RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject 
-----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out deleted file mode 100644 index e6aef6266d392b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[cr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] 
-------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'CA')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out deleted file mode 100644 index 5142d25b09e8a3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out deleted file mode 100644 index c6d7033d8cecea..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out +++ /dev/null @@ 
-1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 cr_item_sk->[i_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
-------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[sr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[sr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 wr_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[wr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out deleted file mode 100644 index 82e9098dbdcbb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF3 c_current_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------------PhysicalProject 
---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) -----------------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out deleted file mode 100644 index be579258fa046a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() build RFs:RF8 r_reason_sk->[wr_reason_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF5 wr_returning_cdemo_sk->[cd_demo_sk];RF6 cd_marital_status->[cd_marital_status];RF7 cd_education_status->[cd_education_status] -----------------------------PhysicalProject -------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 
'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF4 wr_refunded_cdemo_sk->[cd_demo_sk] ---------------------------------PhysicalProject -----------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[wr_refunded_addr_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF1 ws_item_sk->[wr_item_sk];RF2 ws_order_number->[wr_order_number] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 RF3 RF8 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] 
---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF9 ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_year = 2000)) -------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) -----------------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[reason] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out deleted file mode 100644 index ebfed4e4aa6c91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out deleted file mode 100644 index e16cd1ad45b6dd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ws_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out deleted file mode 100644 index a619efe9def125..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF22 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF21 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) 
-----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF19 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF18 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject 
-----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF16 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF15 t_time_sk->[ss_sold_time_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) -------------------------------------PhysicalOlapScan[time_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF12 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF10 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF9 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF7 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject 
-------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject 
---------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 
4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out deleted file mode 100644 index e4d2ae3435f174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out deleted file mode 100644 index 13607b4ae13f5d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out deleted file mode 100644 index 6af6c014d0c298..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF5 cc_call_center_sk->[cr_call_center_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 RF5 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like '1001-5000%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out deleted file mode 100644 index 45f02ddf38ee38..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF0 r_reason_sk->[sr_reason_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((reason.r_reason_desc = 'duplicate purchase')) ---------------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out deleted file mode 100644 index 6a25137bf51fcf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF3 ws_order_number->[ws_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ws_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'OK')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= 
'2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((web_site.web_company_name = 'pri')) ---------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out deleted file mode 100644 index 269d330c090c56..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number];RF1 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF14 RF15 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF14 RF15 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF12 ws_order_number->[wr_order_number,ws_order_number];RF13 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF10 wr_order_number->[ws_order_number];RF11 wr_order_number->[ws_order_number] 
-----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF10 RF11 RF12 RF13 -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF12 RF13 -------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF14 ws_order_number->[ws_order_number,ws_order_number];RF15 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF6 web_site_sk->[ws_web_site_sk];RF7 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_ship_date_sk];RF5 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_ship_addr_sk];RF3 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 RF4 RF5 RF6 RF7 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'NC')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) 
-----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out deleted file mode 100644 index c50ffa373c8150..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_dep_count = 3)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out deleted file 
mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and 
(date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out deleted file mode 100644 index d1a4251b785e74..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out deleted file mode 100644 index e8094a7d066e20..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[cs_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[cs_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out deleted file mode 100644 index 0db3330914c18a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IL', 'TN', 'TX'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('ID', 'OH', 'WY'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('IA', 'MS', 'SC'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ss_addr_sk->[ca_address_sk] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'ID', 'IL', 'MS', 'OH', 'SC', 'TN', 'TX', 'WY')) ---------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('D', 'W'),cd_education_status IN ('2 yr Degree', 'Primary'),OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------PhysicalProject -------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'M', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] 
-------------------------PhysicalProject ---------------------------filter(hd_dep_count IN (1, 3)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2001)) -------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out deleted file mode 100644 index 64754afd654bc6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 c_current_addr_sk->[ca_address_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] apply RFs: RF4 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = 
store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 14)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out deleted file mode 100644 index 5c302c265fc9f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out deleted file mode 100644 index 72acf1dc6b71ff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ws_bill_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out deleted file mode 100644 index 397a41b34c4e60..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject 
---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Music') and (item.i_class = 'country')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows 
-------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out deleted file mode 100644 index d3ec5b9b1b897e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 
d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out deleted file mode 100644 index 43c8732d7f8553..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 
---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF0 i_category->[i_category] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out deleted file mode 100644 index e768a09ec1494d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF10 c_current_addr_sk->[ca_address_sk] -------------------PhysicalProject ---------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[customer_address] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF9 ss_customer_sk->[c_customer_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] 
apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((item.i_category = 'Home')) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out deleted file mode 100644 index 2f4fbe401f1315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Fairview', 'Midway')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count = 4)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out deleted file mode 100644 index a254eeae049f91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] 
---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', 
'43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) 
-------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', 
'46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out deleted file mode 100644 index 9d3c77acb23ca8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF5 cc_call_center_sk->[cr_call_center_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 RF5 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) 
-------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like 'Unknown%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out deleted file mode 100644 index 
21c6fa60d37b75..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF5 wr_order_number->[ws_order_number] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) 
otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'VA')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '2001-05-31') and (date_dim.d_date >= '2001-04-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out deleted file mode 100644 index 78fd7c847c29ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) 
otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Campbell County', 'Cleburne County', 'Escambia County', 'Fairfield County', 'Washtenaw County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out deleted file mode 100644 index e4a4bd5e427fa9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ctr_customer_sk->[c_customer_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[customer] apply RFs: RF3 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF2 ctr_store_sk->[ctr_store_sk,s_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((store.s_state = 'TN')) ---------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------hashAgg[LOCAL] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out deleted file mode 100644 index 78fd7c847c29ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) 
otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Campbell County', 
'Cleburne County', 'Escambia County', 'Fairfield County', 'Washtenaw County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out deleted file mode 100644 index 35504b7f44d24e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1998, 1999)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1998, 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 1999) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 1999) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 1998) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1998) and (t_w_firstyear.sale_type = 'w') and 
(t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out deleted file mode 100644 index f46e97e8a5b3c6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2001-07-15') and (date_dim.d_date >= '2001-06-15')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Electronics', 'Men')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out 
b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out deleted file mode 100644 index 0db3330914c18a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IL', 'TN', 'TX'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('ID', 'OH', 'WY'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('IA', 'MS', 'SC'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ss_addr_sk->[ca_address_sk] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'ID', 'IL', 'MS', 'OH', 'SC', 'TN', 'TX', 'WY')) ---------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('D', 'W'),cd_education_status IN ('2 yr Degree', 'Primary'),OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 
'Primary'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------PhysicalProject -------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'M', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter(hd_dep_count IN (1, 3)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2001)) -------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject 
---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out deleted file mode 100644 index 61f29b11211346..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out +++ /dev/null @@ -1,152 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 brand_id->[i_brand_id];RF7 class_id->[i_class_id];RF8 category_id->[i_category_id] ---------PhysicalProject -----------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((d1.d_year <= 2001) and (d1.d_year >= 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject 
-------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter((d2.d_year <= 2001) and (d2.d_year >= 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -------------------------PhysicalProject ---------------------------filter((d3.d_year <= 2001) and (d3.d_year >= 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 
d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2001) and (date_dim.d_year >= 1999)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[ss_item_sk,ss_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF11 
ss_item_sk->[ss_item_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 RF12 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF15 i_item_sk->[cs_item_sk,ss_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF14 ss_item_sk->[cs_item_sk] 
-------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF13 d_date_sk->[cs_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 RF14 RF15 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF15 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF18 i_item_sk->[ss_item_sk,ws_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF17 ss_item_sk->[ws_item_sk] 
-------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ws_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 RF17 RF18 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF18 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out deleted file mode 100644 index fe0fd80c2f8b05..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) build RFs:RF2 c_customer_sk->[cs_bill_customer_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out deleted file mode 100644 index db5bf9b39a05ec..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF4 cs_order_number->[cs_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() build RFs:RF3 cs_order_number->[cr_order_number] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cs_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'PA')) -------------------------------------PhysicalOlapScan[customer_address] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((call_center.cc_county = 'Williamson County')) -----------------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out deleted file mode 100644 index 12fa11701b619f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[sr_item_sk,ss_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_quarter_name = '2001Q1')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF6 -------------------------------------PhysicalProject ---------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) 
-----------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out deleted file mode 100644 index ea401d9c36dc08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF4 RF5 -----------------------------------PhysicalProject -------------------------------------filter((cd1.cd_education_status = 'Primary') and (cd1.cd_gender = 'F')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 11, 3, 4, 7)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(ca_state IN ('AL', 'CA', 'GA', 'IN', 'MO', 'MT', 'TN')) -------------------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out deleted file mode 100644 index 64754afd654bc6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out +++ /dev/null @@ -1,35 
+0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 c_current_addr_sk->[ca_address_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] apply RFs: RF4 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 
-------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 14)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out deleted file mode 100644 index 988b288ebb81d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = 
expr_(d_week_seq2 - 53))) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1999)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out deleted file mode 100644 index 8728415de8b335..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-07-18') and (date_dim.d_date >= '2002-06-18')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Music', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out deleted file mode 100644 index f68b978b0b2ba6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF2 w_warehouse_sk->[inv_warehouse_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '1999-07-22') and (date_dim.d_date >= '1999-05-23')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out deleted file mode 100644 index 09dedb98772f96..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1211) and (date_dim.d_month_seq >= 1200)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out deleted file mode 100644 index 45d7a47eb7e46a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 7) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN 
shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 ws_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 7) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out deleted file mode 100644 index d93b73ade16ecb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_ticket_number->[ss_ticket_number];RF6 sr_item_sk->[i_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ca_zip->[s_zip];RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF3 RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 5)) ---------------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] apply RFs: RF6 -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'aquamarine')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out deleted file mode 100644 index 8ccafdc60f8ba4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 1999)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 
sr_ticket_number->[ss_ticket_number] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -----------------------------------PhysicalProject -------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF6 -----------------------------------PhysicalProject -------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 1999)) ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out deleted file mode 100644 index 383242890f9dd4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[cs_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2002)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
---------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out deleted file mode 100644 index 0fa387fb0d6bb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -----------------------------------PhysicalProject 
-------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'W')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter((store.s_state = 'TN')) -----------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out deleted file mode 100644 index 36ec7305d96abb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 107.00),(store_sales.ss_list_price <= 117.00)],AND[(store_sales.ss_coupon_amt >= 1319.00),(store_sales.ss_coupon_amt <= 2319.00)],AND[(store_sales.ss_wholesale_cost >= 60.00),(store_sales.ss_wholesale_cost <= 80.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 23.00),(store_sales.ss_list_price <= 33.00)],AND[(store_sales.ss_coupon_amt >= 825.00),(store_sales.ss_coupon_amt <= 1825.00)],AND[(store_sales.ss_wholesale_cost >= 43.00),(store_sales.ss_wholesale_cost <= 63.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 74.00),(store_sales.ss_list_price <= 84.00)],AND[(store_sales.ss_coupon_amt >= 4381.00),(store_sales.ss_coupon_amt <= 5381.00)],AND[(store_sales.ss_wholesale_cost >= 57.00),(store_sales.ss_wholesale_cost <= 77.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 3117.00),(store_sales.ss_coupon_amt <= 4117.00)],AND[(store_sales.ss_wholesale_cost >= 68.00),(store_sales.ss_wholesale_cost <= 88.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 58.00),(store_sales.ss_list_price <= 68.00)],AND[(store_sales.ss_coupon_amt >= 9402.00),(store_sales.ss_coupon_amt <= 10402.00)],AND[(store_sales.ss_wholesale_cost >= 38.00),(store_sales.ss_wholesale_cost <= 58.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] 
-----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 64.00),(store_sales.ss_list_price <= 74.00)],AND[(store_sales.ss_coupon_amt >= 5792.00),(store_sales.ss_coupon_amt <= 6792.00)],AND[(store_sales.ss_wholesale_cost >= 73.00),(store_sales.ss_wholesale_cost <= 93.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out deleted file mode 100644 index 649a6f83d9759a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 sr_customer_sk->[cs_bill_customer_sk];RF8 sr_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[sr_item_sk,ss_item_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter(d_year IN (1998, 1999, 2000)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out deleted file mode 100644 index 4092c73d09fd5b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manufact_id = 816)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 11)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out deleted file mode 100644 index 
c894fcceff19a5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[wr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[wr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2000)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'AR')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out deleted file mode 100644 index f17ab6cf863612..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------PhysicalProject ---------------------filter((ss.d_year = 1999) and d_qoy IN (1, 2, 3)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[customer_address] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ws_bill_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------PhysicalProject -----------------------filter((ws.d_year = 1999) and d_qoy IN (1, 2, 3)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] -----PhysicalResultSink 
-------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county] -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 1999)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF7 ca_county->[ca_county] ---------------------PhysicalProject -----------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 1999)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF6 ca_county->[ca_county,ca_county,ca_county] -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 1999)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 
-----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 1999)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 ---------------------------PhysicalProject -----------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 1999)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF6 -------------------------PhysicalProject ---------------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 1999)) -----------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out deleted file mode 100644 index c413940debc6e7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 722)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-06-07') and (date_dim.d_date >= '2001-03-09')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out deleted file mode 100644 index 721bd289919ecf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF3 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 
ca_address_sk->[ss_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Books')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF7 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF7 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Books')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF11 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 RF10 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF11 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Books')) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out deleted file mode 100644 index c75d4fc3e18155..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((store.s_county = 'Williamson County')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) 
---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (2000, 2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out deleted file mode 100644 index 9012700621a358..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF5 cd_demo_sk->[c_current_cdemo_sk] ---------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -----------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[ss_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 1999)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 1999)) -------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 
---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out deleted file mode 100644 index e31b175d47d2c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 
-----------------------------------------PhysicalProject -------------------------------------------filter((store.s_state = 'TN')) ---------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out deleted file mode 100644 index 149a61f7b37054..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and 
(inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 59.00) and (item.i_current_price >= 29.00) and i_manufact_id IN (705, 742, 777, 944)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '2002-05-28') and (date_dim.d_date >= '2002-03-29')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out deleted file mode 100644 index dc794c95f97b2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ws_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 
-------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1200) and (date_dim.d_month_seq >= 1189)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1200) and (date_dim.d_month_seq >= 1189)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1200) and (date_dim.d_month_seq >= 1189)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out deleted file mode 100644 index 7b00628d966265..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[inv_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[inv_warehouse_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000) and d_moy IN (1, 2)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out deleted file mode 100644 index 709da33d851bff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk,ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id] -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] 
-----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out deleted file mode 100644 index 041e5711184598..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF4 w_warehouse_sk->[cs_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF4 -------------------------------PhysicalProject ---------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-06-01') and (date_dim.d_date >= '2001-04-02')) -------------------------------PhysicalOlapScan[date_dim] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out deleted file mode 100644 index 0bba60d4cdac39..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 744) and (i1.i_manufact_id >= 704)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('forest', 'lime', 'maroon', 'navy', 'powder', 'sky', 'slate', 'smoke'),i_units IN ('Bunch', 'Case', 'Dozen', 'Gross', 'Lb', 'Ounce', 'Pallet', 'Pound'),OR[AND[(item.i_category = 'Women'),i_color IN ('forest', 'lime'),i_units IN ('Pallet', 'Pound'),i_size IN ('economy', 'small')],AND[(item.i_category = 'Women'),i_color IN ('navy', 'slate'),i_units IN ('Bunch', 'Gross'),i_size IN ('extra large', 'petite')],AND[(item.i_category = 'Men'),i_color IN ('powder', 'sky'),i_units IN ('Dozen', 'Lb'),i_size IN ('N/A', 'large')],AND[(item.i_category = 
'Men'),i_color IN ('maroon', 'smoke'),i_units IN ('Case', 'Ounce'),i_size IN ('economy', 'small')]]],AND[i_color IN ('aquamarine', 'dark', 'firebrick', 'frosted', 'papaya', 'peach', 'plum', 'sienna'),i_units IN ('Box', 'Bundle', 'Carton', 'Cup', 'Dram', 'Each', 'Tbl', 'Ton'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'dark'),i_units IN ('Tbl', 'Ton'),i_size IN ('economy', 'small')],AND[(item.i_category = 'Women'),i_color IN ('frosted', 'plum'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'petite')],AND[(item.i_category = 'Men'),i_color IN ('papaya', 'peach'),i_units IN ('Bundle', 'Carton'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('firebrick', 'sienna'),i_units IN ('Cup', 'Each'),i_size IN ('economy', 'small')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'petite', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out deleted file mode 100644 index 0e8b731938b8c3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((item.i_manager_id = 1)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((dt.d_moy = 11) and (dt.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out deleted file mode 100644 index 5c302c265fc9f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out deleted file mode 100644 index 72acf1dc6b71ff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ws_bill_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out deleted file mode 100644 index e7aa0e014dcdd9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter(s_city IN 
('Fairview', 'Midway')) -----------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------filter(d_dow IN (0, 6) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out deleted file mode 100644 index 0e9f713243773a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2000),AND[(date_dim.d_year = 1999),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2001),(date_dim.d_moy = 1)]] and d_year IN (1999, 2000, 2001)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] 
---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() build RFs:RF8 i_category->[i_category,i_category];RF9 i_brand->[i_brand,i_brand];RF10 s_store_name->[s_store_name,s_store_name];RF11 s_company_name->[s_company_name,s_company_name];RF12 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 RF11 RF12 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 RF9 RF10 RF11 RF12 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out deleted file mode 100644 index d11dadeae0b923..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('ND', 'NY', 'SD'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('GA', 'KS', 'MD'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('CO', 'MN', 'NC'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF1 ca_address_sk->[ss_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'Secondary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Advanced Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 
200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'Secondary')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '2 yr Degree')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('2 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('D', 'M', 'S')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'GA', 'KS', 'MD', 'MN', 'NC', 'ND', 'NY', 'SD')) -------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2001)) ---------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out deleted file mode 100644 index 8b807baf5409e4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF1 ws_order_number->[wr_order_number];RF2 ws_item_sk->[wr_item_sk] -----------------------------------------------------------PhysicalProject 
-------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1998)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF4 cs_order_number->[cr_order_number];RF5 cs_item_sk->[cr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1998)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF7 ss_ticket_number->[sr_ticket_number];RF8 ss_item_sk->[sr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF7 RF8 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1998)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out deleted file mode 100644 index 917f29d09727cf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk,ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject 
-----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF3 cp_catalog_page_sk->[cr_catalog_page_sk,cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF7 web_site_sk->[ws_web_site_sk,ws_web_site_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out deleted file mode 100644 index f5c3f38463d42c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF1 sr_ticket_number->[ss_ticket_number];RF2 sr_item_sk->[ss_item_sk];RF3 sr_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - 
diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out deleted file mode 100644 index 6c22d2df3086e3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1223) and (date_dim.d_month_seq >= 1212)) ---------------------------------------------PhysicalOlapScan[date_dim] 
---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1223) and (date_dim.d_month_seq >= 1212)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out deleted file mode 100644 index 45fecf5a37245e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 12) and (dt.d_year = 2000)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out deleted file mode 100644 index d2467a65e93e09..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 
'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out deleted file mode 100644 index 397a41b34c4e60..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject 
---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Music') and (item.i_class = 'country')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows 
-------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out deleted file mode 100644 index 652a5dab8d16b2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 52)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2000)) 
-------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out deleted file mode 100644 index d3ec5b9b1b897e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN 
broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 
2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out deleted file mode 100644 index 2e8174812f69ea..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() build RFs:RF1 cc_call_center_sk->[cs_call_center_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[call_center] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category,i_category];RF8 i_brand->[i_brand,i_brand];RF9 cc_name->[cc_name,cc_name];RF10 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 ---------------------filter((if((avg_monthly_sales > 
0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out deleted file mode 100644 index 97b3a3af96ad02..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) 
otherCondition=() build RFs:RF12 i_item_sk->[ws_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-06-16')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as 
DOUBLE))) build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-06-16')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-06-16')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out deleted file mode 100644 index 4492908c371532..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52)) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF5 s_store_id2->[s_store_id] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1206) and 
(d.d_month_seq >= 1195)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1218) and (d.d_month_seq >= 1207)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out deleted file mode 100644 index 43c8732d7f8553..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 
---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF0 i_category->[i_category] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out deleted file mode 100644 index e56a60c6eebef7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ss_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 10) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Jewelry')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF6 ca_address_sk->[cs_bill_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 10) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF4 
-------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Jewelry')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ca_address_sk->[ws_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 10) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Jewelry')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out deleted file mode 100644 index e768a09ec1494d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF10 c_current_addr_sk->[ca_address_sk] -------------------PhysicalProject ---------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[customer_address] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF9 ss_customer_sk->[c_customer_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] 
-------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((item.i_category = 'Home')) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out deleted file mode 100644 index 00e7e385d016fe..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[ws_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[ws_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1234) and (date_dim.d_month_seq >= 1223)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out deleted file mode 
100644 index bbbb80bc4b68e0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN 
('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out deleted file mode 100644 index 8155e898243e42..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF19 d_date_sk->[c_first_shipto_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF18 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF17 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF16 p_promo_sk->[ss_promo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF14 ss_item_sk->[sr_item_sk];RF15 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF14 RF15 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF13 cd_demo_sk->[ss_cdemo_sk] -------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[cr_item_sk,cs_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF10 ib_income_band_sk->[hd_income_band_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF8 cs_item_sk->[ss_item_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF7 RF8 RF9 RF11 RF12 RF13 RF16 RF17 RF18 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter(d_year IN (1999, 2000)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] 
-------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((sale > (2 * refund))) -----------------------------------------------------------hashAgg[GLOBAL] -------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF5 cr_item_sk->[cs_item_sk];RF6 cr_order_number->[cs_order_number] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF12 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF12 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF10 -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[income_band] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store] ---------------------------------------PhysicalProject -----------------------------------------filter((item.i_current_price <= 58.00) and (item.i_current_price >= 49.00) and i_color IN ('blush', 'lace', 'lawn', 'misty', 'orange', 'pink')) 
-------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[promotion] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[c_current_cdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[c_first_sales_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 RF3 RF4 RF19 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -----------------------------------------PhysicalProject 
-------------------------------------------PhysicalOlapScan[income_band] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 1999)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out deleted file mode 100644 index 67cd990e11a2e8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk,ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1187) and (date_dim.d_month_seq >= 1176)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_month_seq <= 1187) and (date_dim.d_month_seq >= 1176)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out deleted file mode 100644 index 924459179feee9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF3 w_warehouse_sk->[ws_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF0 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('BOXBUNDLES', 'ORIENTAL')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject 
-----------------------------------------filter((date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 71770) and (time_dim.t_time >= 42970)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF7 w_warehouse_sk->[cs_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF4 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('BOXBUNDLES', 'ORIENTAL')) 
-----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 71770) and (time_dim.t_time >= 42970)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out deleted file mode 100644 index 11f2d128129a53..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalPartitionTopN ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 
s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1228) and (date_dim.d_month_seq >= 1217)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out deleted file mode 100644 index 2f4fbe401f1315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Fairview', 'Midway')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count = 4)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out deleted file mode 100644 index 31101f12eab21a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_ship_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2002)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF2 c_current_cdemo_sk->[cd_demo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF2 ---------------------------hashJoin[LEFT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------filter(ca_state IN ('IL', 'ME', 'TX')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2002)) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out deleted file mode 100644 index 2d63af9e61b19e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out deleted file mode 100644 index ec1bdd0e99afb6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1231) and (d1.d_month_seq >= 1220)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() 
build RFs:RF2 s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((date_dim.d_month_seq <= 1231) and (date_dim.d_month_seq >= 1220)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out deleted file mode 100644 index 0d26e1f81ccb94..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_manager_id = 1)) ---------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out deleted file mode 100644 index 06bd3cd70cf867..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF10 w_warehouse_sk->[inv_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF8 inv_date_sk->[d_date_sk];RF9 inv_item_sk->[cs_item_sk,i_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF6 
d_week_seq->[d_week_seq] -------------------------------PhysicalProject ---------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() build RFs:RF4 cs_item_sk->[cr_item_sk];RF5 cs_order_number->[cr_order_number] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------PhysicalProject -------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk) and (catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk];RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 RF7 RF9 ---------------------------------------------------PhysicalProject -----------------------------------------------------NestedLoopJoin[INNER_JOIN](d3.d_date > 
days_add(d_date, INTERVAL 5 DAY)) -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((d1.d_year = 1998)) -----------------------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF6 -----------------------------------------------PhysicalProject -------------------------------------------------filter((household_demographics.hd_buy_potential = '1001-5000')) ---------------------------------------------------PhysicalOlapScan[household_demographics] -------------------------------------------PhysicalProject ---------------------------------------------filter((customer_demographics.cd_marital_status = 'S')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[promotion] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] apply RFs: RF9 -----------------------PhysicalOlapScan[inventory] apply RFs: RF10 -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out deleted file mode 100644 index 52c88ab966b1c9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((store.s_county = 'Williamson County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject 
---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('1001-5000', '5001-10000')) -----------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out deleted file mode 100644 index 30e95b3fd06a84..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk 
= date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(13, 8)) / year_total), NULL) > if((year_total > 0.00), (cast(year_total as DECIMALV3(13, 8)) / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 
-------------------PhysicalProject ---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out deleted file mode 100644 index c26b81b87791ba..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out +++ /dev/null @@ -1,73 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Sports')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (2001, 2002)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF6 ss_ticket_number->[sr_ticket_number];RF7 ss_item_sk->[sr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Sports')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (2001, 2002)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject 
-----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF10 ws_order_number->[wr_order_number];RF11 ws_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_returns] apply RFs: RF10 RF11 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Sports')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (2001, 2002)) ---------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF12 i_brand_id->[i_brand_id];RF13 i_class_id->[i_class_id];RF14 i_category_id->[i_category_id];RF15 
i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 2002)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 RF13 RF14 RF15 ---------------filter((prev_yr.d_year = 2001)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out deleted file mode 100644 index 473b9fded85715..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------filter(ss_customer_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] 
hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 ws_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter(ws_promo_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------filter(cs_bill_customer_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out deleted file mode 100644 index 3659671c869dc8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject 
---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) 
---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF7 wp_web_page_sk->[wr_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out deleted file mode 100644 index 0663ee2198a5fe..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1998)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1998)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out deleted file mode 100644 index e3d8f3af326d91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 7),(household_demographics.hd_vehicle_count > -1)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) -----------------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out deleted file mode 100644 index a254eeae049f91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', 
'22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', 
'69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', 
'25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', 
'73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out deleted file mode 100644 index e33fb4f9e86ba9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF4 ss_item_sk->[sr_item_sk];RF5 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF1 p_promo_sk->[ss_promo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) 
otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '2002-09-13') and (date_dim.d_date >= '2002-08-14')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------------------PhysicalOlapScan[promotion] -----------------------------------------PhysicalProject -------------------------------------------filter((item.i_current_price > 50.00)) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF11 cp_catalog_page_sk->[cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF9 cs_item_sk->[cr_item_sk];RF10 cs_order_number->[cr_order_number] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply 
RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[cs_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[cs_promo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 RF8 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '2002-09-13') and (date_dim.d_date >= '2002-08-14')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------------------PhysicalOlapScan[promotion] -----------------------------------------PhysicalProject -------------------------------------------filter((item.i_current_price > 50.00)) ---------------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] 
-----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF16 ws_item_sk->[wr_item_sk];RF17 ws_order_number->[wr_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF16 RF17 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF15 web_site_sk->[ws_web_site_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF14 i_item_sk->[ws_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF13 p_promo_sk->[ws_promo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF12 RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '2002-09-13') and (date_dim.d_date >= '2002-08-14')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject 
-----------------------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------------------PhysicalOlapScan[promotion] -----------------------------------------PhysicalProject -------------------------------------------filter((item.i_current_price > 50.00)) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out deleted file mode 100644 index b3b6627dd0716d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[cr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2001)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] 
-------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'TN')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out deleted file mode 100644 index a1bb3a33e1d0ff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 88.00) and (item.i_current_price >= 58.00) and i_manufact_id IN (259, 485, 559, 580)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '2001-03-14') and (date_dim.d_date >= '2001-01-13')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out deleted file mode 100644 index 24a4ca5fa16b24..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out +++ /dev/null @@ 
-1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF11 i_item_sk->[sr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[sr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] 
-----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------filter(d_date IN ('2001-07-13', '2001-09-10', '2001-11-16')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply 
RFs: RF4 -----------------------------------------PhysicalProject -------------------------------------------filter(d_date IN ('2001-07-13', '2001-09-10', '2001-11-16')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[wr_returned_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-07-13', '2001-09-10', '2001-11-16')) 
-----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out deleted file mode 100644 index 050e21b33ef3c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF3 c_current_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------------PhysicalProject 
---------------------------filter((customer_address.ca_city = 'Woodland')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((cast(ib_upper_bound as BIGINT) <= 110306) and (income_band.ib_lower_bound >= 60306)) -----------------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out deleted file mode 100644 index 6f4f258704f239..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() build RFs:RF9 r_reason_sk->[wr_reason_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF6 wr_returning_cdemo_sk->[cd_demo_sk];RF7 cd_marital_status->[cd_marital_status];RF8 cd_education_status->[cd_education_status] -------------------------PhysicalProject ---------------------------filter(cd_education_status IN ('Advanced Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'S', 'U')) -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF6 RF7 RF8 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'NC', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('GA', 'WI', 'WV'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('KY', 'OK', 'VA'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF4 wr_refunded_addr_sk->[ca_address_sk] 
---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('GA', 'IA', 'KY', 'NC', 'OK', 'TX', 'VA', 'WI', 'WV')) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'D'),(cd1.cd_education_status = 'Primary'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'College'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'U'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF3 cd_demo_sk->[wr_refunded_cdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF1 ws_item_sk->[wr_item_sk];RF2 ws_order_number->[wr_order_number] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 RF3 RF9 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) 
-------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF5 ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter(OR[AND[(cd1.cd_marital_status = 'D'),(cd1.cd_education_status = 'Primary')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'College')],AND[(cd1.cd_marital_status = 'U'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'S', 'U')) -----------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_page] ---------------------PhysicalProject -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out deleted file mode 100644 index 24ed7d94f66e63..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1197) and (d1.d_month_seq >= 1186)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out deleted file mode 100644 index d21bcaff8bef43..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1213) and (date_dim.d_month_seq >= 1202)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_month_seq <= 1213) and (date_dim.d_month_seq >= 1202)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ws_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1213) and (date_dim.d_month_seq >= 1202)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out deleted file mode 100644 index 5da04ad61d3f42..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF22 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF21 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) 
-----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF19 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF18 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject 
-----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF16 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF15 t_time_sk->[ss_sold_time_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) -------------------------------------PhysicalOlapScan[time_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF12 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF10 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF9 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF7 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject 
-------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject 
---------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = 
-1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out deleted file mode 100644 index b8751687a0ff29..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('audio', 'history', 'school-uniforms')],AND[i_category IN ('Men', 'Shoes', 'Sports'),i_class IN ('pants', 'tennis', 'womens')]] and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Shoes', 'Sports') and i_class IN ('audio', 'history', 'pants', 'school-uniforms', 'tennis', 'womens')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out deleted file mode 100644 index e5f91ba2a61448..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 13) and (time_dim.t_hour >= 12)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 6)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 15) and (time_dim.t_hour >= 14)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 6)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out deleted file mode 100644 index 9d3c77acb23ca8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF5 cc_call_center_sk->[cr_call_center_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 RF5 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like 'Unknown%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out deleted file mode 100644 index 39a6db24528f22..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 714)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2000-05-01') and (date_dim.d_date >= '2000-02-01')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out deleted file mode 100644 index 5f2b776e674990..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF0 r_reason_sk->[sr_reason_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((reason.r_reason_desc = 'reason 58')) ---------------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out deleted file mode 100644 index 0f35f2dc29e44e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF4 ws_order_number->[ws_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() build RFs:RF3 ws_order_number->[wr_order_number] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'OK')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_date <= '2002-06-30') and (date_dim.d_date >= '2002-05-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out deleted file mode 100644 index 21c6fa60d37b75..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF5 wr_order_number->[ws_order_number] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'VA')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '2001-05-31') and (date_dim.d_date >= '2001-04-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out deleted file mode 100644 index b4b739a9bf444f..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_dep_count = 0)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out deleted file mode 100644 index d3a845763241f7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------filter(( not ss_sold_date_sk IS NULL)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1210) and (date_dim.d_month_seq >= 1199)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------filter(( not cs_sold_date_sk IS NULL)) ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1210) and (date_dim.d_month_seq >= 1199)) 
---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out deleted file mode 100644 index beb47b1d23d9dd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '1999-03-07') and (date_dim.d_date >= '1999-02-05')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Jewelry', 'Men', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out 
b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out deleted file mode 100644 index de639b9015342e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[cs_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[cs_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out deleted file mode 100644 index a532a3f74c5395..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) -------------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out deleted file mode 100644 index 6d414f1f56a4f4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[s_nationkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((nation.n_name = 'GERMANY')) -------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out deleted file mode 100644 index 19361de35a19df..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out deleted file mode 100644 index 53fedeb6ad34c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] ---------------PhysicalProject -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out deleted file mode 100644 index 7b04caaf3e087a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((s_comment like '%Customer%Complaints%')) -------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM 
POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -----------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out deleted file mode 100644 index 92cc8c93708400..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) ---------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out deleted file mode 100644 index 79164f3a7abb0d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out deleted file mode 100644 index c1a68c315e06b2..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF3 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((region.r_name = 'EUROPE')) 
-------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out deleted file mode 100644 index 89548468b7c1ae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out deleted file mode 100644 index 7678db3199aef2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < 
'1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out deleted file mode 100644 index c54a6b502f590d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF3 o_orderkey->[l_orderkey,l_orderkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF1 l_orderkey->[l_orderkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF3 -----------------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF0 l_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------filter((l3.l_receiptdate > l3.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject 
-------------------------filter((orders.o_orderstatus = 'F')) ---------------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((nation.n_name = 'SAUDI ARABIA')) -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------------PhysicalOlapScan[customer] - 
diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out deleted file mode 100644 index 48d4e37ec466e3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out deleted file mode 100644 index 55a5eab9536c60..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF5 r_regionkey->[n_regionkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey,s_nationkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = supplier.s_nationkey) and (lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------------------PhysicalProject -------------------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) ---------------------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject 
-----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] apply RFs: RF5 -------------------PhysicalProject ---------------------filter((region.r_name = 'ASIA')) -----------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out deleted file mode 100644 index b98149f8668a1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n2.n_nationkey) and (supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[c_nationkey];RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF2 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF1 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[l_suppkey] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------NestedLoopJoin[INNER_JOIN]OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]] ---------------------PhysicalProject 
-----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out deleted file mode 100644 index 36e6e6b1ee6c61..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF6 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF3 c_custkey->[o_custkey] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() 
-------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 RF1 ---------------------------------------------PhysicalProject -----------------------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) ---------------------------------------------PhysicalOlapScan[part] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[supplier] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[nation] apply RFs: RF6 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter((region.r_name = 'AMERICA')) -------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out deleted file mode 100644 index b60760ec115acb..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF4 p_partkey->[l_partkey,ps_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF4 -----------------------PhysicalProject -------------------------filter((p_name like '%green%')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[supplier] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out deleted file mode 100644 index e47bc37df32324..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) ---------------------------PhysicalOlapScan[orders] ---------------PhysicalProject -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out deleted file mode 100644 index 40df7553b0352f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF1 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((nation.n_name = 'GERMANY')) ---------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out deleted file mode 100644 index 19361de35a19df..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out deleted file mode 100644 index 6df1a05fa3b57f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 l_partkey->[p_partkey] ---------------PhysicalProject -----------------PhysicalOlapScan[part] apply RFs: RF0 ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out deleted file mode 100644 index 1d3d780435aab3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() build RFs:RF0 supplier_no->[s_suppkey] -------------PhysicalProject ---------------PhysicalOlapScan[supplier] apply RFs: RF0 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out deleted file mode 100644 index f04b0bc766338b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------filter((s_comment like '%Customer%Complaints%')) ---------------------PhysicalOlapScan[supplier] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out deleted file mode 100644 index 850c567ab4aa39..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) -----------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out deleted file mode 100644 index e4c5acd49e3fb3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out deleted file mode 100644 index fc7b45224906e4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out +++ 
/dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF2 p_partkey->[ps_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalOlapScan[supplier] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'EUROPE')) 
-----------------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out deleted file mode 100644 index 6b7a6da490ce49..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out deleted file mode 100644 index 6b3b115fcfee2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out deleted file mode 100644 index 0436a7b245b174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF4 l_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[lineitem] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF3 l_orderkey->[l_orderkey] ---------------------PhysicalProject -----------------------filter((l3.l_receiptdate > l3.l_commitdate)) -------------------------PhysicalOlapScan[lineitem] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] -------------------------PhysicalProject ---------------------------filter((orders.o_orderstatus = 'F')) -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[l_suppkey] -----------------------------PhysicalProject -------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] 
---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter((nation.n_name = 'SAUDI ARABIA')) -------------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) 
---------------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out deleted file mode 100644 index 36d395afccaa2d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out deleted file mode 100644 index 2f45b1e87b401d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------PhysicalProject 
---------------------------------filter((region.r_name = 'ASIA')) -----------------------------------PhysicalOlapScan[region] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out deleted file mode 100644 index 957b17a7402749..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) build RFs:RF4 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF3 l_orderkey->[o_orderkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -----------------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer.c_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[c_nationkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter(n_name IN ('FRANCE', 'GERMANY')) ---------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out deleted file mode 100644 index fc46b49054b927..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF5 l_suppkey->[s_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF5 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF4 c_custkey->[o_custkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF3 l_orderkey->[o_orderkey] 
---------------------------------PhysicalProject -----------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF2 p_partkey->[l_partkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 -------------------------------------PhysicalProject ---------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) -----------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[c_nationkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((region.r_name = 'AMERICA')) -----------------------------------------PhysicalOlapScan[region] ---------------------PhysicalProject -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out deleted file mode 100644 index 
d70d5886607341..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------filter((p_name like '%green%')) -----------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] 
---------------------------PhysicalProject -----------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out b/regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out deleted file mode 100644 index ba4c37059cb12c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out +++ /dev/null @@ -1,36 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !rf_setop -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((T.l_linenumber = expr_cast(r_regionkey as BIGINT))) otherCondition=() build RFs:RF0 expr_cast(r_regionkey as BIGINT)->[cast(l_linenumber as BIGINT),o_orderkey] -------------PhysicalExcept ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------PhysicalOlapScan[lineitem] apply RFs: RF0 ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------PhysicalProject ---------------PhysicalOlapScan[region] - --- !rf_setop_expr -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_abs(l_linenumber) = expr_cast(r_regionkey as LARGEINT))) otherCondition=() build RFs:RF0 expr_cast(r_regionkey as LARGEINT)->[abs(cast(l_linenumber as BIGINT)),abs(o_orderkey)] -------------PhysicalProject ---------------PhysicalExcept -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject 
---------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------PhysicalProject ---------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out deleted file mode 100644 index c13794f49a1c6e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[c_nationkey] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) ---------------------------PhysicalOlapScan[orders] ---------------PhysicalProject -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out deleted file mode 100644 index 40df7553b0352f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF1 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((nation.n_name = 'GERMANY')) ---------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out deleted file mode 100644 index 5d94f6d22b76d8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out deleted file mode 100644 index 6df1a05fa3b57f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 l_partkey->[p_partkey] ---------------PhysicalProject -----------------PhysicalOlapScan[part] apply RFs: RF0 ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out deleted file mode 100644 index 1d3d780435aab3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() build RFs:RF0 supplier_no->[s_suppkey] -------------PhysicalProject ---------------PhysicalOlapScan[supplier] apply RFs: RF0 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) 
-----------------------------PhysicalOlapScan[lineitem] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out deleted file mode 100644 index f04b0bc766338b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------filter((s_comment like 
'%Customer%Complaints%')) ---------------------PhysicalOlapScan[supplier] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out deleted file mode 100644 index 850c567ab4aa39..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) -----------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out deleted file mode 100644 index e4c5acd49e3fb3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out deleted file mode 100644 index fc7b45224906e4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out +++ /dev/null 
@@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF2 p_partkey->[ps_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalOlapScan[supplier] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'EUROPE')) 
-----------------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out deleted file mode 100644 index 6b7a6da490ce49..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out deleted file mode 100644 index 6b3b115fcfee2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] -------------------PhysicalProject 
---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out deleted file mode 100644 index 0436a7b245b174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF4 l_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[lineitem] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF3 l_orderkey->[l_orderkey] ---------------------PhysicalProject -----------------------filter((l3.l_receiptdate > l3.l_commitdate)) -------------------------PhysicalOlapScan[lineitem] apply RFs: RF3 ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] -------------------------PhysicalProject ---------------------------filter((orders.o_orderstatus = 'F')) -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[l_suppkey] -----------------------------PhysicalProject -------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter((nation.n_name = 'SAUDI ARABIA')) -------------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out deleted file mode 100644 index 36d395afccaa2d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out deleted file mode 100644 index f2ebd3c8b31b86..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() build RFs:RF4 c_nationkey->[n_nationkey,s_nationkey];RF5 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[orders] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF1 RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 RF4 -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'ASIA')) -----------------------------------PhysicalOlapScan[region] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out deleted file mode 100644 index 957b17a7402749..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) build RFs:RF4 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF3 l_orderkey->[o_orderkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -----------------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer.c_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[c_nationkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter(n_name IN ('FRANCE', 'GERMANY')) ---------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out deleted file mode 100644 index ef2b3523bbbe98..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF6 n_nationkey->[s_nationkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF5 l_suppkey->[s_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF4 c_custkey->[o_custkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF3 
l_orderkey->[o_orderkey] ---------------------------------PhysicalProject -----------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF2 p_partkey->[l_partkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 -------------------------------------PhysicalProject ---------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) -----------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[c_nationkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((region.r_name = 'AMERICA')) -----------------------------------------PhysicalOlapScan[region] ---------------------PhysicalProject -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out deleted file mode 100644 index 
cd77e18b02c916..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF4 ps_suppkey->[l_suppkey,s_suppkey];RF5 ps_partkey->[l_partkey,p_partkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF3 RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((p_name like '%green%')) -----------------------------------PhysicalOlapScan[part] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) 
otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF0 RF4 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out deleted file mode 100644 index 0820ef5c2c6526..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[c_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) -------------------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------PhysicalProject ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out deleted file mode 100644 index 6d414f1f56a4f4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[s_nationkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((nation.n_name = 'GERMANY')) -------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out deleted file mode 100644 index 5d94f6d22b76d8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out deleted file mode 100644 index d6457aadcccc10..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out deleted file mode 100644 index e9b45b5888ce54..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() build RFs:RF0 s_suppkey->[l_suppkey] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -------------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out deleted file mode 100644 index 7b04caaf3e087a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((s_comment like '%Customer%Complaints%')) -------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -----------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out deleted file mode 100644 index 92cc8c93708400..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) ---------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out deleted file mode 100644 index 44c12faa6d47d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 c_custkey->[o_custkey] -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out deleted file mode 100644 index c1a68c315e06b2..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF3 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((region.r_name = 'EUROPE')) 
-------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out deleted file mode 100644 index 89548468b7c1ae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out deleted file mode 100644 index 7678db3199aef2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < 
'1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out deleted file mode 100644 index c54a6b502f590d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF3 o_orderkey->[l_orderkey,l_orderkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF1 l_orderkey->[l_orderkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF3 -----------------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF0 l_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------filter((l3.l_receiptdate > l3.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject 
-------------------------filter((orders.o_orderstatus = 'F')) ---------------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((nation.n_name = 'SAUDI ARABIA')) -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------------PhysicalOlapScan[customer] - diff 
--git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out deleted file mode 100644 index 48d4e37ec466e3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out deleted file mode 100644 index 640dabeb70f0bb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF5 r_regionkey->[n_regionkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey,s_nationkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = supplier.s_nationkey) and (lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey];RF3 s_nationkey->[c_nationkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------------------PhysicalProject -------------------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) ---------------------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 
---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] apply RFs: RF5 -------------------PhysicalProject ---------------------filter((region.r_name = 'ASIA')) -----------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out deleted file mode 100644 index b98149f8668a1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n2.n_nationkey) and (supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[c_nationkey];RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF2 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF1 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[l_suppkey] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------NestedLoopJoin[INNER_JOIN]OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]] ---------------------PhysicalProject 
-----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out deleted file mode 100644 index f3abaf5956c1a6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF6 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF5 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF3 c_custkey->[o_custkey] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() 
build RFs:RF2 s_suppkey->[l_suppkey] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 RF1 RF2 ---------------------------------------------PhysicalProject -----------------------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) ---------------------------------------------PhysicalOlapScan[part] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[supplier] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[nation] apply RFs: RF6 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter((region.r_name = 'AMERICA')) -------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out deleted file mode 100644 index 
ab51e5595023a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF5 s_suppkey->[l_suppkey,ps_suppkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF4 p_partkey->[l_partkey,ps_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 ps_suppkey->[l_suppkey];RF3 ps_partkey->[l_partkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 RF4 RF5 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF4 RF5 -----------------------PhysicalProject -------------------------filter((p_name like '%green%')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out b/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out index 285b861b742c5b..759b6c890ea13e 100644 --- a/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out +++ b/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out @@ -92,3 +92,11 @@ -- !sql -- \N +-- !sql1 -- +54 2001:db8:4::/128 +55 \N + +-- !sql2 -- +\N \N +2001:db8:4::/128 false + diff --git a/regression-test/data/nereids_clickbench_shape_p0/query1.out b/regression-test/data/shape_check/clickbench/query1.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query1.out rename to regression-test/data/shape_check/clickbench/query1.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query10.out b/regression-test/data/shape_check/clickbench/query10.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query10.out rename to regression-test/data/shape_check/clickbench/query10.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query11.out b/regression-test/data/shape_check/clickbench/query11.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query11.out rename to regression-test/data/shape_check/clickbench/query11.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query12.out b/regression-test/data/shape_check/clickbench/query12.out similarity index 100% rename from 
regression-test/data/nereids_clickbench_shape_p0/query12.out rename to regression-test/data/shape_check/clickbench/query12.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query13.out b/regression-test/data/shape_check/clickbench/query13.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query13.out rename to regression-test/data/shape_check/clickbench/query13.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query14.out b/regression-test/data/shape_check/clickbench/query14.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query14.out rename to regression-test/data/shape_check/clickbench/query14.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query15.out b/regression-test/data/shape_check/clickbench/query15.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query15.out rename to regression-test/data/shape_check/clickbench/query15.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query16.out b/regression-test/data/shape_check/clickbench/query16.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query16.out rename to regression-test/data/shape_check/clickbench/query16.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query17.out b/regression-test/data/shape_check/clickbench/query17.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query17.out rename to regression-test/data/shape_check/clickbench/query17.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query18.out b/regression-test/data/shape_check/clickbench/query18.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query18.out rename to regression-test/data/shape_check/clickbench/query18.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query19.out 
b/regression-test/data/shape_check/clickbench/query19.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query19.out rename to regression-test/data/shape_check/clickbench/query19.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query2.out b/regression-test/data/shape_check/clickbench/query2.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query2.out rename to regression-test/data/shape_check/clickbench/query2.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query20.out b/regression-test/data/shape_check/clickbench/query20.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query20.out rename to regression-test/data/shape_check/clickbench/query20.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query21.out b/regression-test/data/shape_check/clickbench/query21.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query21.out rename to regression-test/data/shape_check/clickbench/query21.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query22.out b/regression-test/data/shape_check/clickbench/query22.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query22.out rename to regression-test/data/shape_check/clickbench/query22.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query23.out b/regression-test/data/shape_check/clickbench/query23.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query23.out rename to regression-test/data/shape_check/clickbench/query23.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query24.out b/regression-test/data/shape_check/clickbench/query24.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query24.out rename to regression-test/data/shape_check/clickbench/query24.out diff --git 
a/regression-test/data/nereids_clickbench_shape_p0/query25.out b/regression-test/data/shape_check/clickbench/query25.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query25.out rename to regression-test/data/shape_check/clickbench/query25.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query26.out b/regression-test/data/shape_check/clickbench/query26.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query26.out rename to regression-test/data/shape_check/clickbench/query26.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query27.out b/regression-test/data/shape_check/clickbench/query27.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query27.out rename to regression-test/data/shape_check/clickbench/query27.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query28.out b/regression-test/data/shape_check/clickbench/query28.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query28.out rename to regression-test/data/shape_check/clickbench/query28.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query29.out b/regression-test/data/shape_check/clickbench/query29.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query29.out rename to regression-test/data/shape_check/clickbench/query29.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query3.out b/regression-test/data/shape_check/clickbench/query3.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query3.out rename to regression-test/data/shape_check/clickbench/query3.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query30.out b/regression-test/data/shape_check/clickbench/query30.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query30.out rename to 
regression-test/data/shape_check/clickbench/query30.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query31.out b/regression-test/data/shape_check/clickbench/query31.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query31.out rename to regression-test/data/shape_check/clickbench/query31.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query32.out b/regression-test/data/shape_check/clickbench/query32.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query32.out rename to regression-test/data/shape_check/clickbench/query32.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query33.out b/regression-test/data/shape_check/clickbench/query33.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query33.out rename to regression-test/data/shape_check/clickbench/query33.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query34.out b/regression-test/data/shape_check/clickbench/query34.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query34.out rename to regression-test/data/shape_check/clickbench/query34.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query35.out b/regression-test/data/shape_check/clickbench/query35.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query35.out rename to regression-test/data/shape_check/clickbench/query35.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query36.out b/regression-test/data/shape_check/clickbench/query36.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query36.out rename to regression-test/data/shape_check/clickbench/query36.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query37.out b/regression-test/data/shape_check/clickbench/query37.out similarity index 100% rename from 
regression-test/data/nereids_clickbench_shape_p0/query37.out rename to regression-test/data/shape_check/clickbench/query37.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query38.out b/regression-test/data/shape_check/clickbench/query38.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query38.out rename to regression-test/data/shape_check/clickbench/query38.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query39.out b/regression-test/data/shape_check/clickbench/query39.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query39.out rename to regression-test/data/shape_check/clickbench/query39.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query4.out b/regression-test/data/shape_check/clickbench/query4.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query4.out rename to regression-test/data/shape_check/clickbench/query4.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query40.out b/regression-test/data/shape_check/clickbench/query40.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query40.out rename to regression-test/data/shape_check/clickbench/query40.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query41.out b/regression-test/data/shape_check/clickbench/query41.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query41.out rename to regression-test/data/shape_check/clickbench/query41.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query42.out b/regression-test/data/shape_check/clickbench/query42.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query42.out rename to regression-test/data/shape_check/clickbench/query42.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query43.out 
b/regression-test/data/shape_check/clickbench/query43.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query43.out rename to regression-test/data/shape_check/clickbench/query43.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query5.out b/regression-test/data/shape_check/clickbench/query5.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query5.out rename to regression-test/data/shape_check/clickbench/query5.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query6.out b/regression-test/data/shape_check/clickbench/query6.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query6.out rename to regression-test/data/shape_check/clickbench/query6.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query7.out b/regression-test/data/shape_check/clickbench/query7.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query7.out rename to regression-test/data/shape_check/clickbench/query7.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query8.out b/regression-test/data/shape_check/clickbench/query8.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query8.out rename to regression-test/data/shape_check/clickbench/query8.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query9.out b/regression-test/data/shape_check/clickbench/query9.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query9.out rename to regression-test/data/shape_check/clickbench/query9.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/flat.out b/regression-test/data/shape_check/ssb_sf100/shape/flat.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/flat.out rename to regression-test/data/shape_check/ssb_sf100/shape/flat.out diff --git 
a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q1.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q1.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q1.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q1.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q1.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q1.3.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q2.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q2.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q2.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q2.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q2.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q2.3.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.1.out similarity index 100% rename from 
regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.3.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.4.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.4.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.4.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.4.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q4.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q4.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q4.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q4.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q4.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q4.3.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/constraints/query23.out b/regression-test/data/shape_check/tpcds_sf100/constraints/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/constraints/query23.out rename to regression-test/data/shape_check/tpcds_sf100/constraints/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query13.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query18.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query23.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query28.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query33.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query38.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query43.out similarity index 100% rename from
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query48.out diff --git
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query53.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query58.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query63.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query68.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query73.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query78.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query83.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query88.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query89.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query9.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query9.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query9.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query93.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query98.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query13.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query18.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query23.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query28.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query33.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query38.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query43.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query48.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query53.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query58.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query63.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query68.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query73.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query78.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query83.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query88.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query9.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query9.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query93.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query98.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query1.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query1.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query10.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query10.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query11.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query11.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query12.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query12.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query13.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query13.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query14.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query14.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query15.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query15.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query16.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query16.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query17.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query17.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query18.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query18.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query19.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query19.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query2.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query2.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query20.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query20.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query21.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query21.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query22.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query22.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query23.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query23.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query24.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query24.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query25.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query25.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query26.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query26.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query27.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query27.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query28.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query28.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query29.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query29.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query3.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query3.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query30.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query30.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query31.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query31.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query32.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query32.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query33.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query33.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query34.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query34.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query35.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query35.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query36.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query36.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query37.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query37.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query38.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query38.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query39.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query39.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query4.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query4.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query40.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query40.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query41.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query41.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query42.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query42.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query44.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query44.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query45.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query45.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query46.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query46.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query47.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query47.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query48.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query48.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query49.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query49.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query5.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query5.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query50.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query50.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query51.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query51.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query52.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query52.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query53.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query53.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query54.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query54.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query55.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query55.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query56.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query56.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query57.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query57.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query58.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query58.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query59.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query59.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query6.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query6.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query60.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query60.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query61.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query61.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query62.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query62.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query63.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query63.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query65.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query65.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query66.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query66.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query67.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query67.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query68.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query68.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query69.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query69.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query7.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query7.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query70.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query70.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query71.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query71.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query72.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query72.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query73.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query73.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query74.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query74.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query75.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query75.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query76.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query76.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query77.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query77.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query78.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query78.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query79.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query79.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query8.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query8.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query80.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query80.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query81.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query81.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query82.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query82.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query83.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query83.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query84.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query84.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query85.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query85.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query86.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query86.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query87.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query87.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query88.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query88.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query89.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query89.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query9.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query90.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query90.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query91.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query91.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query92.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query92.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query93.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query93.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query94.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query94.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query95.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query95.out similarity index 100% rename 
from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query95.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query96.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query96.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query97.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query97.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query98.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query98.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query99.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query99.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query1.out b/regression-test/data/shape_check/tpcds_sf100/shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query10.out b/regression-test/data/shape_check/tpcds_sf100/shape/query10.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query11.out b/regression-test/data/shape_check/tpcds_sf100/shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query12.out b/regression-test/data/shape_check/tpcds_sf100/shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query13.out b/regression-test/data/shape_check/tpcds_sf100/shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query14.out b/regression-test/data/shape_check/tpcds_sf100/shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query15.out b/regression-test/data/shape_check/tpcds_sf100/shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query16.out b/regression-test/data/shape_check/tpcds_sf100/shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query16.out 
rename to regression-test/data/shape_check/tpcds_sf100/shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out b/regression-test/data/shape_check/tpcds_sf100/shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query18.out b/regression-test/data/shape_check/tpcds_sf100/shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query19.out b/regression-test/data/shape_check/tpcds_sf100/shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query2.out b/regression-test/data/shape_check/tpcds_sf100/shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query20.out b/regression-test/data/shape_check/tpcds_sf100/shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query21.out b/regression-test/data/shape_check/tpcds_sf100/shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query21.out 
diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query22.out b/regression-test/data/shape_check/tpcds_sf100/shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query23.out b/regression-test/data/shape_check/tpcds_sf100/shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out b/regression-test/data/shape_check/tpcds_sf100/shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out b/regression-test/data/shape_check/tpcds_sf100/shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query26.out b/regression-test/data/shape_check/tpcds_sf100/shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query27.out b/regression-test/data/shape_check/tpcds_sf100/shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query27.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query28.out b/regression-test/data/shape_check/tpcds_sf100/shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out b/regression-test/data/shape_check/tpcds_sf100/shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query3.out b/regression-test/data/shape_check/tpcds_sf100/shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query3.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query30.out b/regression-test/data/shape_check/tpcds_sf100/shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query31.out b/regression-test/data/shape_check/tpcds_sf100/shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query32.out b/regression-test/data/shape_check/tpcds_sf100/shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query33.out 
b/regression-test/data/shape_check/tpcds_sf100/shape/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query34.out b/regression-test/data/shape_check/tpcds_sf100/shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query35.out b/regression-test/data/shape_check/tpcds_sf100/shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query36.out b/regression-test/data/shape_check/tpcds_sf100/shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query37.out b/regression-test/data/shape_check/tpcds_sf100/shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query38.out b/regression-test/data/shape_check/tpcds_sf100/shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query39.out b/regression-test/data/shape_check/tpcds_sf100/shape/query39.out 
similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query4.out b/regression-test/data/shape_check/tpcds_sf100/shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query40.out b/regression-test/data/shape_check/tpcds_sf100/shape/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query41.out b/regression-test/data/shape_check/tpcds_sf100/shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query42.out b/regression-test/data/shape_check/tpcds_sf100/shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query43.out b/regression-test/data/shape_check/tpcds_sf100/shape/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query43.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query44.out b/regression-test/data/shape_check/tpcds_sf100/shape/query44.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query45.out b/regression-test/data/shape_check/tpcds_sf100/shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query46.out b/regression-test/data/shape_check/tpcds_sf100/shape/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query47.out b/regression-test/data/shape_check/tpcds_sf100/shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query48.out b/regression-test/data/shape_check/tpcds_sf100/shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query49.out b/regression-test/data/shape_check/tpcds_sf100/shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query5.out b/regression-test/data/shape_check/tpcds_sf100/shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query5.out rename 
to regression-test/data/shape_check/tpcds_sf100/shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query50.out b/regression-test/data/shape_check/tpcds_sf100/shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query51.out b/regression-test/data/shape_check/tpcds_sf100/shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query52.out b/regression-test/data/shape_check/tpcds_sf100/shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query53.out b/regression-test/data/shape_check/tpcds_sf100/shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query54.out b/regression-test/data/shape_check/tpcds_sf100/shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query55.out b/regression-test/data/shape_check/tpcds_sf100/shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query55.out diff 
--git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query56.out b/regression-test/data/shape_check/tpcds_sf100/shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query57.out b/regression-test/data/shape_check/tpcds_sf100/shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query58.out b/regression-test/data/shape_check/tpcds_sf100/shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query59.out b/regression-test/data/shape_check/tpcds_sf100/shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query59.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query6.out b/regression-test/data/shape_check/tpcds_sf100/shape/query6.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query60.out b/regression-test/data/shape_check/tpcds_sf100/shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query61.out 
b/regression-test/data/shape_check/tpcds_sf100/shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query62.out b/regression-test/data/shape_check/tpcds_sf100/shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query63.out b/regression-test/data/shape_check/tpcds_sf100/shape/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out b/regression-test/data/shape_check/tpcds_sf100/shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out b/regression-test/data/shape_check/tpcds_sf100/shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query66.out b/regression-test/data/shape_check/tpcds_sf100/shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query67.out b/regression-test/data/shape_check/tpcds_sf100/shape/query67.out 
similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query68.out b/regression-test/data/shape_check/tpcds_sf100/shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query69.out b/regression-test/data/shape_check/tpcds_sf100/shape/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query7.out b/regression-test/data/shape_check/tpcds_sf100/shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query70.out b/regression-test/data/shape_check/tpcds_sf100/shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query71.out b/regression-test/data/shape_check/tpcds_sf100/shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query72.out b/regression-test/data/shape_check/tpcds_sf100/shape/query72.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query73.out b/regression-test/data/shape_check/tpcds_sf100/shape/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query74.out b/regression-test/data/shape_check/tpcds_sf100/shape/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query75.out b/regression-test/data/shape_check/tpcds_sf100/shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query76.out b/regression-test/data/shape_check/tpcds_sf100/shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query77.out b/regression-test/data/shape_check/tpcds_sf100/shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query78.out b/regression-test/data/shape_check/tpcds_sf100/shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query78.out 
rename to regression-test/data/shape_check/tpcds_sf100/shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query79.out b/regression-test/data/shape_check/tpcds_sf100/shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.out b/regression-test/data/shape_check/tpcds_sf100/shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query80.out b/regression-test/data/shape_check/tpcds_sf100/shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query81.out b/regression-test/data/shape_check/tpcds_sf100/shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query82.out b/regression-test/data/shape_check/tpcds_sf100/shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query83.out b/regression-test/data/shape_check/tpcds_sf100/shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query83.out rename to 
regression-test/data/shape_check/tpcds_sf100/shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query84.out b/regression-test/data/shape_check/tpcds_sf100/shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query85.out b/regression-test/data/shape_check/tpcds_sf100/shape/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query86.out b/regression-test/data/shape_check/tpcds_sf100/shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query87.out b/regression-test/data/shape_check/tpcds_sf100/shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query88.out b/regression-test/data/shape_check/tpcds_sf100/shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query89.out b/regression-test/data/shape_check/tpcds_sf100/shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query89.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.out b/regression-test/data/shape_check/tpcds_sf100/shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query90.out b/regression-test/data/shape_check/tpcds_sf100/shape/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query91.out b/regression-test/data/shape_check/tpcds_sf100/shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query92.out b/regression-test/data/shape_check/tpcds_sf100/shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query93.out b/regression-test/data/shape_check/tpcds_sf100/shape/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query94.out b/regression-test/data/shape_check/tpcds_sf100/shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query94.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query95.out b/regression-test/data/shape_check/tpcds_sf100/shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query96.out b/regression-test/data/shape_check/tpcds_sf100/shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query97.out b/regression-test/data/shape_check/tpcds_sf100/shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query98.out b/regression-test/data/shape_check/tpcds_sf100/shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query99.out b/regression-test/data/shape_check/tpcds_sf100/shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query45.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query56.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query8.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query95.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.out b/regression-test/data/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.out rename to regression-test/data/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query1.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query1.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query1.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query10.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query10.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query10.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query11.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query11.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query11.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query12.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query12.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query12.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query13.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query13.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query13.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query14.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query14.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query14.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query15.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query15.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query15.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query16.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query16.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query16.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query17.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query17.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query17.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query18.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query18.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query18.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query19.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query19.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query19.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query2.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query2.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query2.out 
rename to regression-test/data/shape_check/tpcds_sf1000/hint/query2.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query20.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query20.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query20.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query21.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query21.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query21.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query22.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query22.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query22.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query23.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query23.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query23.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query24.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query24.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query24.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query25.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query25.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query25.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query26.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query26.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query26.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query27.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query27.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query27.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query28.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query28.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query28.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query29.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query29.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query29.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query3.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query3.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query3.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query30.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query30.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query30.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query31.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query31.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query31.out 
rename to regression-test/data/shape_check/tpcds_sf1000/hint/query31.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query32.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query32.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query32.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query33.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query33.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query33.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query34.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query34.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query34.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query35.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query35.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query35.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query36.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query36.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query36.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query37.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query37.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query37.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query38.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query38.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query38.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query39.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query39.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query39.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query4.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query4.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query4.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query40.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query40.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query40.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query41.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query41.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query41.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query42.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query42.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query42.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query43.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query43.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query43.out 
rename to regression-test/data/shape_check/tpcds_sf1000/hint/query43.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query44.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query44.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query45.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query46.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query46.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query46.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query47.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query47.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query47.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query48.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query48.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query48.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query49.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query49.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query49.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query5.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query5.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query5.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query50.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query50.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query50.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query51.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query51.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query51.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query52.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query52.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query52.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query53.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query53.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query53.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query54.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query54.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query54.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query55.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query55.out similarity index 100% rename from 
regression-test/data/nereids_hint_tpcds_p0/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query56.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query57.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query57.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query57.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query58.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query58.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query58.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query59.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query59.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query6.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query6.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query60.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query60.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query60.out rename to 
regression-test/data/shape_check/tpcds_sf1000/hint/query60.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query61.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query61.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query61.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query62.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query62.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query62.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query63.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query63.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query63.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query64.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query64.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query64.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query65.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query65.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query65.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query66.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query66.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query66.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query67.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query67.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query67.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query68.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query68.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query68.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query69.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query69.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query69.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query7.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query7.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query7.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query70.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query70.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query70.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query71.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query71.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query71.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query72.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query72.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query72.out 
rename to regression-test/data/shape_check/tpcds_sf1000/hint/query72.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query73.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query73.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query73.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query74.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query74.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query74.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query75.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query75.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query75.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query76.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query76.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query76.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query77.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query77.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query77.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query78.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query78.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query78.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query79.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query79.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query79.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query8.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query8.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query8.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query80.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query80.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query80.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query81.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query81.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query81.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query82.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query82.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query82.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query83.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query83.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query83.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query84.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query84.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query84.out 
rename to regression-test/data/shape_check/tpcds_sf1000/hint/query84.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query85.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query85.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query85.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query86.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query86.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query86.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query87.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query87.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query87.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query88.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query88.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query88.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query89.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query89.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query9.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query9.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query9.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query90.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query90.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query90.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query91.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query91.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query91.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query92.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query92.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query92.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query93.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query93.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query93.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query94.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query94.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query95.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query96.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query96.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query96.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query97.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query97.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query97.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query98.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query98.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query98.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query99.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query99.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query1.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query10.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query11.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query11.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query12.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query13.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query14.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query15.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query16.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query17.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query18.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query19.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query2.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query20.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query21.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query22.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query22.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query23.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query26.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query27.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query28.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query28.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query3.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query30.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query31.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query32.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query33.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query33.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query34.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query35.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query36.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query37.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query38.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query39.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query39.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query4.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query40.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query41.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query42.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query43.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query43.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query44.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query44.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query45.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query46.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query47.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query48.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query49.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query5.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query5.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query50.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query51.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query52.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query53.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query54.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query55.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query55.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query56.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query57.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query58.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query59.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query6.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query60.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query60.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query61.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query62.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query63.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query66.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query66.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query67.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query68.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query69.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query7.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query70.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query71.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query71.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query72.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query73.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query74.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query75.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query76.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query77.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query77.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query78.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query79.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query8.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query80.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query81.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query82.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query82.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query83.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query84.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query85.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query86.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query87.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query88.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query88.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query89.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query9.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query9.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query90.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query91.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query92.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query93.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query93.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query94.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query95.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query96.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query97.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query98.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query99.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query99.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query1.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query10.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query11.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query12.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query13.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query14.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query14.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query15.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query16.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query17.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query18.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query19.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query2.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query2.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query20.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query21.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query22.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query23.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query24.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query25.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query25.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query26.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query27.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query28.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query29.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query3.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query30.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query30.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query31.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query32.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query33.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query34.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query35.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query36.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query36.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query37.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query38.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query39.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query4.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query40.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query41.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query41.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query42.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query43.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query43.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query44.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query45.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query46.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query47.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query47.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query48.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query49.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query5.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query50.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query51.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query52.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query52.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query53.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query54.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query55.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query56.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query57.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query58.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query58.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query59.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query6.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query60.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query61.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query62.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query63.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query63.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query64.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query65.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query66.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query67.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query68.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query69.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query69.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query7.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query70.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query71.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query72.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query73.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query74.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query74.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query75.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query76.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query77.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query78.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query79.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query8.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query8.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query80.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query81.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query82.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query83.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query84.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query85.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query85.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query86.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query87.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query88.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query89.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query9.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query9.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query90.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query90.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query91.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query92.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query93.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query94.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query95.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query96.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query96.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query97.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query98.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query99.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query99.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q1.out b/regression-test/data/shape_check/tpch_sf1000/hint/q1.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q1.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q1.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q10.out b/regression-test/data/shape_check/tpch_sf1000/hint/q10.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q10.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q10.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q11.out b/regression-test/data/shape_check/tpch_sf1000/hint/q11.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q11.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q11.out 
diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q12.out b/regression-test/data/shape_check/tpch_sf1000/hint/q12.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q12.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q12.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q13.out b/regression-test/data/shape_check/tpch_sf1000/hint/q13.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q13.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q13.out diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out b/regression-test/data/shape_check/tpch_sf1000/hint/q14.out similarity index 82% rename from regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q14.out index 3633709f96fa8a..d92539ff58dce4 100644 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q14.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalDistribute[DistributionSpecGather] --------hashAgg[LOCAL] ----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() +------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() --------------PhysicalProject ----------------PhysicalOlapScan[part] --------------PhysicalProject diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.out b/regression-test/data/shape_check/tpch_sf1000/hint/q15.out similarity index 95% rename from regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q15.out index 9e6b383230a34f..4e525326a19945 100644 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.out +++ 
b/regression-test/data/shape_check/tpch_sf1000/hint/q15.out @@ -9,14 +9,14 @@ PhysicalResultSink ------------PhysicalProject --------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() ----------------PhysicalProject +------------------PhysicalOlapScan[supplier] +----------------PhysicalProject ------------------hashAgg[GLOBAL] --------------------PhysicalDistribute[DistributionSpecHash] ----------------------hashAgg[LOCAL] ------------------------PhysicalProject --------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) ----------------------------PhysicalOlapScan[lineitem] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] ------------hashAgg[GLOBAL] --------------PhysicalDistribute[DistributionSpecGather] ----------------hashAgg[LOCAL] @@ -28,3 +28,8 @@ PhysicalResultSink ----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) ------------------------------PhysicalOlapScan[lineitem] +Hint log: +Used: leading(supplier revenue0 ) +UnUsed: +SyntaxError: + diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q17.out b/regression-test/data/shape_check/tpch_sf1000/hint/q17.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q17.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q17.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q19.out b/regression-test/data/shape_check/tpch_sf1000/hint/q19.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q19.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q19.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q3.out b/regression-test/data/shape_check/tpch_sf1000/hint/q3.out similarity index 88% rename from regression-test/data/nereids_hint_tpch_p0/shape/q3.out rename to 
regression-test/data/shape_check/tpch_sf1000/hint/q3.out index a58bf8720464f4..0e310fdb46e2c0 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q3.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q3.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalTopN[LOCAL_SORT] --------hashAgg[LOCAL] ----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() +------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() --------------PhysicalProject ----------------filter((lineitem.l_shipdate > '1995-03-15')) ------------------PhysicalOlapScan[lineitem] diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q4.out b/regression-test/data/shape_check/tpch_sf1000/hint/q4.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q4.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q4.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q5.out b/regression-test/data/shape_check/tpch_sf1000/hint/q5.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q5.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q5.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q6.out b/regression-test/data/shape_check/tpch_sf1000/hint/q6.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q6.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q6.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q7.out b/regression-test/data/shape_check/tpch_sf1000/hint/q7.out similarity index 89% rename from regression-test/data/nereids_hint_tpch_p0/shape/q7.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q7.out index d9fb0e0791a203..62b5874d806aed 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q7.out +++ 
b/regression-test/data/shape_check/tpch_sf1000/hint/q7.out @@ -8,7 +8,7 @@ PhysicalResultSink ----------PhysicalDistribute[DistributionSpecHash] ------------hashAgg[LOCAL] --------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) +----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) ------------------PhysicalProject --------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() ----------------------PhysicalProject diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q8.out b/regression-test/data/shape_check/tpch_sf1000/hint/q8.out similarity index 90% rename from regression-test/data/nereids_hint_tpch_p0/shape/q8.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q8.out index 486e40152fb644..7be8c0af45a719 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q8.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q8.out @@ -11,11 +11,11 @@ PhysicalResultSink ----------------PhysicalProject ------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() --------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() +----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() ------------------------PhysicalProject --------------------------PhysicalOlapScan[supplier] ------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() +--------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() ----------------------------PhysicalProject ------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() --------------------------------PhysicalProject diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q9.out b/regression-test/data/shape_check/tpch_sf1000/hint/q9.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q9.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q12.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.out 
b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.out rename to 
regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q7.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q1.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q1.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q10.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q10.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q11.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q11.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q12.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q12.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q12.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q13.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q13.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q14.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q14.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q15.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q15.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q16.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q16.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q17.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q17.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q18.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q18.out rename to 
regression-test/data/shape_check/tpch_sf1000/rf_prune/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q19.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q19.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q2.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q2.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q21.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q21.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q22.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q22.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q22.out diff 
--git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q3.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q3.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q4.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q4.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q5.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q5.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q6.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q6.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q7.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q7.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q7.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q8.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q8.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q9.out 
b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q9.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.out b/regression-test/data/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.out rename to regression-test/data/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q1.out b/regression-test/data/shape_check/tpch_sf1000/shape/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q1.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q10.out b/regression-test/data/shape_check/tpch_sf1000/shape/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q10.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q11.out b/regression-test/data/shape_check/tpch_sf1000/shape/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q11.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q12.out b/regression-test/data/shape_check/tpch_sf1000/shape/q12.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q12.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q13.out b/regression-test/data/shape_check/tpch_sf1000/shape/q13.out 
similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q13.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q14.out b/regression-test/data/shape_check/tpch_sf1000/shape/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q14.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q15.out b/regression-test/data/shape_check/tpch_sf1000/shape/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q15.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q16.out b/regression-test/data/shape_check/tpch_sf1000/shape/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q16.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q17.out b/regression-test/data/shape_check/tpch_sf1000/shape/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q17.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q18.out b/regression-test/data/shape_check/tpch_sf1000/shape/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q18.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q19.out b/regression-test/data/shape_check/tpch_sf1000/shape/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q19.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q19.out 
diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q2.out b/regression-test/data/shape_check/tpch_sf1000/shape/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q2.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/shape/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out b/regression-test/data/shape_check/tpch_sf1000/shape/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q21.out b/regression-test/data/shape_check/tpch_sf1000/shape/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q21.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q22.out b/regression-test/data/shape_check/tpch_sf1000/shape/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q22.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q3.out b/regression-test/data/shape_check/tpch_sf1000/shape/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q3.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q4.out b/regression-test/data/shape_check/tpch_sf1000/shape/q4.out 
similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q4.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out b/regression-test/data/shape_check/tpch_sf1000/shape/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q6.out b/regression-test/data/shape_check/tpch_sf1000/shape/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q6.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q7.out b/regression-test/data/shape_check/tpch_sf1000/shape/q7.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q7.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q8.out b/regression-test/data/shape_check/tpch_sf1000/shape/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q8.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out b/regression-test/data/shape_check/tpch_sf1000/shape/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.out rename to 
regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q12.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q15.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out 
b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q4.out diff --git 
a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q7.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q9.out diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy index f5d811514b375d..7ed3f1cb605d60 100644 --- 
a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy +++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy @@ -71,6 +71,7 @@ import java.util.concurrent.Future import java.util.concurrent.ThreadFactory import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicBoolean +import java.util.regex.Pattern import java.util.stream.Collectors import java.util.stream.LongStream import static org.apache.doris.regression.util.DataUtils.sortByToString @@ -263,7 +264,7 @@ class Suite implements GroovyInterceptable { } public T connect(String user = context.config.jdbcUser, String password = context.config.jdbcPassword, - String url = context.config.jdbcUrl, Closure actionSupplier) { + String url = context.config.jdbcUrl, Closure actionSupplier) { return context.connect(user, password, url, actionSupplier) } @@ -640,7 +641,7 @@ class Suite implements GroovyInterceptable { } long getTableVersion(long dbId, String tableName) { - def result = sql_return_maparray """show proc '/dbs/${dbId}'""" + def result = sql_return_maparray """show proc '/dbs/${dbId}'""" for (def res : result) { if(res.TableName.equals(tableName)) { log.info(res.toString()) @@ -989,7 +990,7 @@ class Suite implements GroovyInterceptable { if (exitcode != 0) { staticLogger.info("exit code: ${exitcode}, output\n: ${proc.text}") if (mustSuc == true) { - Assert.assertEquals(0, exitcode) + Assert.assertEquals(0, exitcode) } } } catch (IOException e) { @@ -1119,7 +1120,7 @@ class Suite implements GroovyInterceptable { Connection getTargetConnection() { return context.getTargetConnection(this) } - + boolean deleteFile(String filePath) { def file = new File(filePath) file.delete() @@ -1142,7 +1143,7 @@ class Suite implements GroovyInterceptable { ) DISTRIBUTED BY HASH(id) BUCKETS 1 PROPERTIES ( - "replication_num" = "${backends.size()}" + "replication_num" = "${backends.size()}" ) """ @@ -1314,13 +1315,24 @@ class Suite 
implements GroovyInterceptable { throw new IllegalStateException("Check tag '${tag}' failed, sql:\n${arg}", t) } if (errorMsg != null) { + def allPlan = "" + if (arg instanceof String) { + def query = (String) arg; + def pattern = Pattern.compile("^\\s*explain\\s+shape\\s*plan\\s*", Pattern.MULTILINE) + if (query =~ pattern) { + def physical = query.replaceAll(pattern, "explain all plan ") + try { + allPlan = JdbcUtils.executeToStringList(context.getConnection(), physical)[0].join('\n') + } catch (Throwable ignore) {} + } + } String csvRealResult = realResults.stream() - .map {row -> OutputUtils.toCsvString(row)} - .collect(Collectors.joining("\n")) + .map { row -> OutputUtils.toCsvString(row) } + .collect(Collectors.joining("\n")) def outputFilePath = context.outputFile.getCanonicalPath().substring(context.config.dataPath.length() + 1) def line = expectCsvResults.currentLine() logger.warn("expect results in file: ${outputFilePath}, line: ${line}\nrealResults:\n" + csvRealResult) - throw new IllegalStateException("Check tag '${tag}' failed:\n${errorMsg}\n\nsql:\n${arg}") + throw new IllegalStateException("Check tag '${tag}' failed:\n${errorMsg}\n\nsql:\n${arg}\n\n${allPlan}") } } } @@ -1962,6 +1974,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success && result.contains("(${mv_name})") } + if (!success) { + logger.info("mv_rewrite_all_success fail =" + result) + } Assert.assertEquals(true, success) } } @@ -1972,7 +1987,11 @@ class Suite implements GroovyInterceptable { check { result -> boolean success = true; for (String mv_name : mv_names) { - Assert.assertEquals(true, result.contains("${mv_name} chose")) + def contains = result.contains("${mv_name} chose") + if (!contains) { + logger.info("mv_rewrite_all_success fail =" + result) + } + Assert.assertEquals(true, contains) } } } @@ -1999,6 +2018,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success || 
result.contains("(${mv_name})") } + if (!success) { + logger.info("mv_rewrite_any_success fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2011,6 +2033,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success || result.contains("${mv_name} chose") } + if (!success) { + logger.info("mv_rewrite_any_success fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2031,6 +2056,9 @@ class Suite implements GroovyInterceptable { def each_result = splitResult.length == 2 ? splitResult[0].contains(mv_name) : false success = success && (result.contains("(${mv_name})") || each_result) } + if (!success) { + logger.info("mv_rewrite_all_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2044,6 +2072,9 @@ class Suite implements GroovyInterceptable { boolean stepSuccess = result.contains("${mv_name} chose") || result.contains("${mv_name} not chose") success = success && stepSuccess } + if (!success) { + logger.info("mv_rewrite_all_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2064,6 +2095,9 @@ class Suite implements GroovyInterceptable { def each_result = splitResult.length == 2 ? 
splitResult[0].contains(mv_name) : false success = success || (result.contains("(${mv_name})") || each_result) } + if (!success) { + logger.info("mv_rewrite_any_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2076,6 +2110,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success || result.contains("${mv_name} chose") || result.contains("${mv_name} not chose") } + if (!success) { + logger.info("mv_rewrite_any_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2134,6 +2171,9 @@ class Suite implements GroovyInterceptable { boolean stepFail = !result.contains("(${mv_name})") fail = fail && stepFail } + if (!fail) { + logger.info("mv_rewrite_all_fail =" + result) + } Assert.assertEquals(true, fail) } } @@ -2147,6 +2187,9 @@ class Suite implements GroovyInterceptable { boolean stepFail = result.contains("${mv_name} fail") fail = fail && stepFail } + if (!fail) { + logger.info("mv_rewrite_all_fail =" + result) + } Assert.assertEquals(true, fail) } } @@ -2164,6 +2207,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { fail = fail || !result.contains("(${mv_name})") } + if (!fail) { + logger.info("mv_rewrite_any_fail =" + result) + } Assert.assertEquals(true, fail) } } @@ -2176,6 +2222,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { fail = fail || result.contains("${mv_name} fail") } + if (!fail) { + logger.info("mv_rewrite_any_fail =" + result) + } Assert.assertEquals(true, fail) } } diff --git a/regression-test/pipeline/p0/conf/be.conf b/regression-test/pipeline/p0/conf/be.conf index 760b8762430bd7..0b73375b3fbdd3 100644 --- a/regression-test/pipeline/p0/conf/be.conf +++ b/regression-test/pipeline/p0/conf/be.conf @@ -71,7 +71,7 @@ be_proc_monitor_interval_ms = 30000 webserver_num_workers = 128 pipeline_task_leakage_detect_period_sec=1 crash_in_memory_tracker_inaccurate = 
true -enable_table_size_correctness_check=true +#enable_table_size_correctness_check=true enable_brpc_connection_check=true # enable download small files in batch, see apache/doris#45061 for details diff --git a/regression-test/suites/account_p0/test_system_db.groovy b/regression-test/suites/account_p0/test_system_db.groovy new file mode 100644 index 00000000000000..11b9d6d492bf42 --- /dev/null +++ b/regression-test/suites/account_p0/test_system_db.groovy @@ -0,0 +1,46 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_system_db","p0,auth") { + String suiteName = "test_system_db" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """ + grant select_priv on __internal_schema.* to `${user}`; + """ + sql """ + grant select_priv on information_schema.* to `${user}`; + """ + sql """ + grant select_priv on mysql.* to `${user}`; + """ + sql """ + revoke select_priv on __internal_schema.* from `${user}`; + """ + sql """ + revoke select_priv on information_schema.* from `${user}`; + """ + sql """ + revoke select_priv on mysql.* from `${user}`; + """ + try_sql("DROP USER ${user}") +} diff --git a/regression-test/suites/account_p0/test_system_role.groovy b/regression-test/suites/account_p0/test_system_role.groovy new file mode 100644 index 00000000000000..64c0f122fa21dd --- /dev/null +++ b/regression-test/suites/account_p0/test_system_role.groovy @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_system_role","p0,auth") { + test { + sql """ + drop role operator; + """ + exception "Can not drop role" + } + + test { + sql """ + drop role `admin`; + """ + exception "Can not drop role" + } + + test { + sql """ + grant select_priv on *.*.* to role "operator"; + """ + exception "Can not grant" + } + test { + sql """ + grant select_priv on *.*.* to role "admin"; + """ + exception "Can not grant" + } + test { + sql """ + revoke Node_priv on *.*.* from role 'operator'; + """ + exception "Can not revoke" + } + + test { + sql """ + revoke Admin_priv on *.*.* from role 'admin'; + """ + exception "Can not revoke" + } + +} diff --git a/regression-test/suites/account_p0/test_system_user.groovy b/regression-test/suites/account_p0/test_system_user.groovy index 1805f1669ea570..5993e1d238b444 100644 --- a/regression-test/suites/account_p0/test_system_user.groovy +++ b/regression-test/suites/account_p0/test_system_user.groovy @@ -17,7 +17,7 @@ import org.junit.Assert; -suite("test_system_user") { +suite("test_system_user","p0,auth") { test { sql """ create user `root`; @@ -36,4 +36,30 @@ suite("test_system_user") { """ exception "system" } + test { + sql """ + revoke "operator" from root; + """ + exception "Can not revoke role" + } + test { + sql """ + revoke 'admin' from `admin`; + """ + exception "Unsupported operation" + } + + sql """ + grant select_priv on *.*.* to `root`; + """ + sql """ + revoke select_priv on *.*.* from `root`; + """ + sql """ + grant select_priv on *.*.* to `admin`; + """ + sql """ + revoke select_priv on *.*.* from `admin`; + """ + } diff --git a/regression-test/suites/auth_p0/test_backends_auth.groovy b/regression-test/suites/auth_p0/test_backends_auth.groovy new file mode 100644 index 00000000000000..753ae837c776e9 --- /dev/null +++ b/regression-test/suites/auth_p0/test_backends_auth.groovy @@ -0,0 +1,64 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license 
agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_backends_auth","p0,auth") { + String suiteName = "test_backends_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + show backends; + """ + exception "denied" + } + test { + sql """ + select * from backends(); + """ + exception "denied" + } + } + + sql """grant admin_priv on *.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + show backends; + """ + sql """ + select * from backends(); + """ + } + + try_sql("DROP USER ${user}") +} \ No newline at end of file diff --git a/regression-test/suites/auth_p0/test_catalogs_auth.groovy b/regression-test/suites/auth_p0/test_catalogs_auth.groovy new file mode 100644 index 00000000000000..96ebcef7cf81cb --- /dev/null +++ 
b/regression-test/suites/auth_p0/test_catalogs_auth.groovy @@ -0,0 +1,68 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_catalogs_auth","p0,auth") { + String suiteName = "test_catalogs_auth" + String catalogName = "${suiteName}_catalog" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """drop catalog if exists ${catalogName}""" + sql """CREATE CATALOG ${catalogName} PROPERTIES ( + "type"="es", + "hosts"="http://8.8.8.8:9200" + );""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def showRes = sql """show catalogs;""" + logger.info("showRes: " + showRes.toString()) + assertFalse(showRes.toString().contains("${catalogName}")) + + def tvfRes = sql """select * from catalogs();""" + logger.info("tvfRes: " + tvfRes.toString()) + 
assertFalse(tvfRes.toString().contains("${catalogName}")) + } + + sql """grant select_priv on ${catalogName}.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def showRes = sql """show catalogs;""" + logger.info("showRes: " + showRes.toString()) + assertTrue(showRes.toString().contains("${catalogName}")) + + def tvfRes = sql """select * from catalogs();""" + logger.info("tvfRes: " + tvfRes.toString()) + assertTrue(tvfRes.toString().contains("${catalogName}")) + } + + try_sql("DROP USER ${user}") + sql """drop catalog if exists ${catalogName}""" +} diff --git a/regression-test/suites/auth_p0/test_frontends_auth.groovy b/regression-test/suites/auth_p0/test_frontends_auth.groovy new file mode 100644 index 00000000000000..21fff527518e2b --- /dev/null +++ b/regression-test/suites/auth_p0/test_frontends_auth.groovy @@ -0,0 +1,64 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_frontends_auth","p0,auth") { + String suiteName = "test_frontends_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + show frontends; + """ + exception "denied" + } + test { + sql """ + select * from frontends(); + """ + exception "denied" + } + } + + sql """grant admin_priv on *.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + show frontends; + """ + sql """ + select * from frontends(); + """ + } + + try_sql("DROP USER ${user}") +} \ No newline at end of file diff --git a/regression-test/suites/auth_p0/test_frontends_disks_auth.groovy b/regression-test/suites/auth_p0/test_frontends_disks_auth.groovy new file mode 100644 index 00000000000000..3767fdde0a5e92 --- /dev/null +++ b/regression-test/suites/auth_p0/test_frontends_disks_auth.groovy @@ -0,0 +1,55 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_frontends_disks_auth","p0,auth") { + String suiteName = "test_frontends_disks_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select * from frontends_disks(); + """ + exception "denied" + } + } + + sql """grant admin_priv on *.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select * from frontends_disks(); + """ + } + + try_sql("DROP USER ${user}") +} \ No newline at end of file diff --git a/regression-test/suites/auth_p0/test_mtmv_auth.groovy b/regression-test/suites/auth_p0/test_mtmv_auth.groovy new file mode 100644 index 00000000000000..52ecbebb70b268 --- /dev/null +++ b/regression-test/suites/auth_p0/test_mtmv_auth.groovy @@ -0,0 +1,100 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_mtmv_auth","p0,auth") { + String suiteName = "test_mtmv_auth" + String dbName = context.config.getDbNameByFile(context.file) + String tableName = "${suiteName}_table" + String mvName = "${suiteName}_mv" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """DROP MATERIALIZED VIEW IF EXISTS ${mvName};""" + sql """drop table if exists `${tableName}`""" + sql """ + CREATE TABLE `${tableName}` ( + `user_id` LARGEINT NOT NULL COMMENT '\"用户id\"', + `date` DATE NOT NULL COMMENT '\"数据灌入日期时间\"', + `num` SMALLINT NOT NULL COMMENT '\"数量\"' + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`, `date`, `num`) + COMMENT 'OLAP' + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + select * from ${tableName}; + """ + + sql """refresh MATERIALIZED VIEW ${mvName} auto""" + waitingMTMVTaskFinishedByMvName(mvName) + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on 
regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def mvsRes = sql """select * from mv_infos("database"="${dbName}");""" + logger.info("mvsRes: " + mvsRes.toString()) + assertFalse(mvsRes.toString().contains("${mvName}")) + + def jobsRes = sql """select * from jobs("type"="mv");""" + logger.info("jobsRes: " + jobsRes.toString()) + assertFalse(jobsRes.toString().contains("${mvName}")) + + def tasksRes = sql """select * from tasks("type"="mv");""" + logger.info("tasksRes: " + tasksRes.toString()) + assertFalse(tasksRes.toString().contains("${mvName}")) + + } + + sql """grant select_priv on ${dbName}.${mvName} to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def mvsRes = sql """select * from mv_infos("database"="${dbName}");""" + logger.info("mvsRes: " + mvsRes.toString()) + assertTrue(mvsRes.toString().contains("${mvName}")) + + def jobsRes = sql """select * from jobs("type"="mv");""" + logger.info("jobsRes: " + jobsRes.toString()) + assertTrue(jobsRes.toString().contains("${mvName}")) + + def tasksRes = sql """select * from tasks("type"="mv");""" + logger.info("tasksRes: " + tasksRes.toString()) + assertTrue(tasksRes.toString().contains("${mvName}")) + } + + try_sql("DROP USER ${user}") + sql """DROP MATERIALIZED VIEW IF EXISTS ${mvName};""" + sql """drop table if exists `${tableName}`""" +} diff --git a/regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy b/regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy new file mode 100644 index 00000000000000..3f0ae7ea8d524c --- /dev/null +++ b/regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy @@ -0,0 +1,69 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_partition_values_tvf_auth","p0,auth") { + String suiteName = "test_partition_values_tvf_auth" + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disable Hive test.") + return; + } + + for (String hivePrefix : ["hive3"]) { + String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp") + String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_external_catalog_hive_partition" + + sql """drop catalog if exists ${catalog_name};""" + sql """ + create catalog if not exists ${catalog_name} properties ( + 'type'='hms', + 'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}' + ); + """ + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select * from partition_values("catalog" = "${catalog_name}", "database" = "multi_catalog", 
"table" = "orc_partitioned_columns") order by t_int, t_float; + """ + exception "denied" + } + } + sql """grant select_priv on ${catalog_name}.multi_catalog.orc_partitioned_columns to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select * from partition_values("catalog" = "${catalog_name}", "database" = "multi_catalog", "table" = "orc_partitioned_columns") order by t_int, t_float; + """ + } + try_sql("DROP USER ${user}") + sql """drop catalog if exists ${catalog_name}""" + } +} + diff --git a/regression-test/suites/auth_p0/test_partitions_auth.groovy b/regression-test/suites/auth_p0/test_partitions_auth.groovy new file mode 100644 index 00000000000000..0b769f11567845 --- /dev/null +++ b/regression-test/suites/auth_p0/test_partitions_auth.groovy @@ -0,0 +1,84 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_partitions_auth","p0,auth") { + String suiteName = "test_partitions_auth" + String dbName = context.config.getDbNameByFile(context.file) + String tableName = "${suiteName}_table" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """drop table if exists `${tableName}`""" + sql """ + CREATE TABLE `${tableName}` ( + `user_id` LARGEINT NOT NULL COMMENT '\"用户id\"', + `date` DATE NOT NULL COMMENT '\"数据灌入日期时间\"', + `num` SMALLINT NOT NULL COMMENT '\"数量\"' + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`, `date`, `num`) + COMMENT 'OLAP' + PARTITION BY RANGE(`date`) + (PARTITION p201701_1000 VALUES [('0000-01-01'), ('2017-02-01')), + PARTITION p201702_2000 VALUES [('2017-02-01'), ('2017-03-01')), + PARTITION p201703_all VALUES [('2017-03-01'), ('2017-04-01'))) + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + show partitions from ${dbName}.${tableName}; + """ + exception "denied" + } + test { + sql """ + select * from partitions('catalog'='internal',"database"="${dbName}","table"="${tableName}"); + """ + exception "denied" + } + } + + sql """grant select_priv on ${dbName}.${tableName} to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + show partitions from ${dbName}.${tableName}; + """ + sql """ + select * from partitions('catalog'='internal',"database"="${dbName}","table"="${tableName}"); + """ + } + + try_sql("DROP USER ${user}") + sql """drop table if exists 
`${tableName}`""" +} diff --git a/regression-test/suites/auth_p0/test_query_tvf_auth.groovy b/regression-test/suites/auth_p0/test_query_tvf_auth.groovy new file mode 100644 index 00000000000000..05c274077d9eb3 --- /dev/null +++ b/regression-test/suites/auth_p0/test_query_tvf_auth.groovy @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_jdbc_query_tvf","p0,auth") { + String suiteName = "test_jdbc_query_tvf" + String enabled = context.config.otherConfigs.get("enableJdbcTest") + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + String s3_endpoint = getS3Endpoint() + String bucket = getS3BucketName() + String driver_url = "https://${bucket}.${s3_endpoint}/regression/jdbc_driver/mysql-connector-java-8.0.25.jar" + if (enabled != null && enabled.equalsIgnoreCase("true")) { + String user = "test_jdbc_user"; + String pwd = '123456'; + String catalog_name = "${suiteName}_catalog" + String mysql_port = context.config.otherConfigs.get("mysql_57_port"); + + sql """drop catalog if exists ${catalog_name} """ + + sql """create catalog if not exists ${catalog_name} properties( + "type"="jdbc", + "user"="root", + "password"="123456", + "jdbc_url" = "jdbc:mysql://${externalEnvIp}:${mysql_port}/doris_test", + "driver_url" = "${driver_url}", + "driver_class" = "com.mysql.cj.jdbc.Driver" + );""" + + String dorisuser = "${suiteName}_user" + String dorispwd = 'C123_567p' + try_sql("DROP USER ${dorisuser}") + sql """CREATE USER '${dorisuser}' IDENTIFIED BY '${dorispwd}'""" + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${dorisuser}"""; + } + + sql """grant select_priv on regression_test to ${dorisuser}""" + + connect(user=dorisuser, password="${dorispwd}", url=context.config.jdbcUrl) { + test { + sql """ + select * from query('catalog' = '${catalog_name}', 'query' = 'select * from doris_test.all_types'); + """ + exception "denied" + } + } + sql """grant select_priv on ${catalog_name}.*.* to ${dorisuser}""" + connect(user=dorisuser, password="${dorispwd}", url=context.config.jdbcUrl) { + sql """ + select * from query('catalog' = '${catalog_name}', 'query' = 'select * from doris_test.all_types'); + """ + } + try_sql("DROP USER 
${dorisuser}") + sql """drop catalog if exists ${catalog_name} """ + } +} + diff --git a/regression-test/suites/auth_p0/test_select_column_auth.groovy b/regression-test/suites/auth_p0/test_select_column_auth.groovy index 52f1dc02697dd4..36cc2a0a09cf1c 100644 --- a/regression-test/suites/auth_p0/test_select_column_auth.groovy +++ b/regression-test/suites/auth_p0/test_select_column_auth.groovy @@ -130,6 +130,10 @@ suite("test_select_column_auth","p0,auth") { sql """grant select_priv(sum_id) on ${dbName}.${mtmv_name} to ${user}""" sql """grant select_priv(id) on ${dbName}.${tableName} to ${user}""" connect(user, "${pwd}", context.config.jdbcUrl) { + def show_grants = sql """show grants;""" + logger.info("show grants:" + show_grants.toString()) + // If exec on fe follower, wait meta data is ready on follower + Thread.sleep(2000) sql "SET enable_materialized_view_rewrite=true" explain { sql("""select username, sum(id) from ${dbName}.${tableName} group by username""") diff --git a/regression-test/suites/auth_p0/test_select_count_auth.groovy b/regression-test/suites/auth_p0/test_select_count_auth.groovy new file mode 100644 index 00000000000000..ccea1a4a580098 --- /dev/null +++ b/regression-test/suites/auth_p0/test_select_count_auth.groovy @@ -0,0 +1,93 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_select_count_auth","p0,auth") { + String suiteName = "test_select_count_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select count(*) from __internal_schema.audit_log; + """ + exception "denied" + } + test { + sql """ + select count(1) from __internal_schema.audit_log; + """ + exception "denied" + } + test { + sql """ + select count(query_id) from __internal_schema.audit_log; + """ + exception "denied" + } + } + + sql """grant select_priv(query_id) on __internal_schema.audit_log to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select count(*) from __internal_schema.audit_log; + """ + exception "denied" + } + test { + sql """ + select count(1) from __internal_schema.audit_log; + """ + exception "denied" + } + sql """ + select count(query_id) from __internal_schema.audit_log; + """ + } + + sql """grant select_priv on __internal_schema.audit_log to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select count(*) from __internal_schema.audit_log; + """ + sql """ + select count(1) from __internal_schema.audit_log; + """ + sql """ + select count(query_id) from __internal_schema.audit_log; + """ + } + + try_sql("DROP USER ${user}") +} diff --git a/regression-test/suites/compression_p0/load.groovy 
b/regression-test/suites/compression_p0/load.groovy index 70eeafea12ae48..722732f65c3b06 100644 --- a/regression-test/suites/compression_p0/load.groovy +++ b/regression-test/suites/compression_p0/load.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_compression", "p0") { +suite("load") { // test snappy compression algorithm def tableName = "test_snappy" diff --git a/regression-test/suites/correctness/test_date_function_const.groovy b/regression-test/suites/correctness/test_date_function_const.groovy index d1ba4db4e68987..e9bf11bd24ebd6 100644 --- a/regression-test/suites/correctness/test_date_function_const.groovy +++ b/regression-test/suites/correctness/test_date_function_const.groovy @@ -61,6 +61,6 @@ suite("test_date_function_const") { test { sql """select date_add("1900-01-01 12:00:00.123456", interval 10000000000 month);""" - exception "Operation months_add 133705200962757184 1410065408 out of range" + exception "Operation months_add of 1900-01-01 12:00:00.123456, 1410065408 out of range" } } diff --git a/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy b/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy index 633ad98d86f556..b448ad406bbde2 100644 --- a/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy +++ b/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy @@ -32,7 +32,7 @@ suite("test_nestedtypes_insert_into_select", "p0") { test { sql "insert into ast values ('text' , [named_struct('a',1,'b','home'),named_struct('a',2,'b','work')]);" - exception "mismatched input 'named_struct' expecting" + exception "no viable alternative at input '[named_struct'" } @@ -50,6 +50,6 @@ suite("test_nestedtypes_insert_into_select", "p0") { test { sql "insert into ast values ('text' , 
[named_struct('a',1,'b','home'),named_struct('a',2,'b','work')]);" - exception "mismatched input 'named_struct' expecting" + exception "no viable alternative at input '[named_struct'" } } diff --git a/regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy b/regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy new file mode 100644 index 00000000000000..2dd647aa2c1d8e --- /dev/null +++ b/regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy @@ -0,0 +1,50 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_hive_orc_predicate", "p0,external,hive,external_docker,external_docker_hive") { + + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disable Hive test.") + return; + } + + for (String hivePrefix : ["hive2", "hive3"]) { + try { + String hms_port = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_predicate" + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + + sql """drop catalog if exists ${catalog_name}""" + sql """create catalog if not exists ${catalog_name} properties ( + "type"="hms", + 'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}' + );""" + sql """use `${catalog_name}`.`multi_catalog`""" + + qt_predicate_fixed_char1 """ select * from fixed_char_table where c = 'a';""" + qt_predicate_fixed_char2 """ select * from fixed_char_table where c = 'a ';""" + + qt_predicate_changed_type1 """ select * from type_changed_table where id = '1';""" + qt_predicate_changed_type2 """ select * from type_changed_table where id = '2';""" + qt_predicate_changed_type3 """ select * from type_changed_table where id = '3';""" + + sql """drop catalog if exists ${catalog_name}""" + } finally { + } + } +} diff --git a/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy b/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy index 047b4a36fe2622..557eaf5b061d70 100644 --- a/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy +++ b/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy @@ -16,7 +16,7 @@ // under the License. 
suite("test_iceberg_meta", "p2,external,iceberg,external_remote,external_remote_iceberg") { - + String suiteName = "test_iceberg_meta" Boolean ignoreP2 = true; if (ignoreP2) { logger.info("disable p2 test"); @@ -54,5 +54,37 @@ suite("test_iceberg_meta", "p2,external,iceberg,external_remote,external_remote_ "query_type" = "snapshots") where snapshot_id = 7235593032487457798; """ + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select committed_at, snapshot_id, parent_id, operation from iceberg_meta( + "table" = "${iceberg_catalog_name}.${db}.multi_partition", + "query_type" = "snapshots"); + """ + exception "denied" + } + } + sql """grant select_priv on ${iceberg_catalog_name}.${db}.multi_partition to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select committed_at, snapshot_id, parent_id, operation from iceberg_meta( + "table" = "${iceberg_catalog_name}.${db}.multi_partition", + "query_type" = "snapshots"); + """ + } + try_sql("DROP USER ${user}") } } diff --git a/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy b/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy index b54f6374d833b8..9c0cb5aea97f0b 100644 --- a/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy +++ b/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy @@ -26,8 +26,6 @@ 
suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { def changed_variables = sql "show variables where Changed = 1" logger.info("changed variables: " + changed_variables.toString()) - // sql "UNSET GLOBAL VARIABLE ALL;" - sql "SET global enable_match_without_inverted_index = false" boolean disableAutoCompaction = false @@ -120,7 +118,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { } def run_sql = { -> - def result = sql_return_maparray "SELECT * FROM ${tableName} WHERE name MATCH 'bason'" + def result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE name MATCH 'bason'" assertEquals(3, result.size()) assertEquals(1, result[0]['id']) assertEquals("bason", result[0]['name']) @@ -129,7 +127,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals(3, result[2]['id']) assertEquals("bason", result[2]['name']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE age = 11" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE age = 11" assertEquals(3, result.size()) assertEquals(1, result[0]['id']) assertEquals("bason", result[0]['name']) @@ -138,7 +136,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals(3, result[2]['id']) assertEquals("bason", result[2]['name']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE description MATCH 'singing'" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE description MATCH 'singing'" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals("bason is good at singing", result[0]['description']) @@ -147,7 +145,7 @@ 
suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals("bason", result[2]['name']) assertEquals("bason is good at singing", result[2]['description']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE array_contains(scores, 79)" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE array_contains(scores, 79)" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals("[79, 85, 97]", result[0]['scores']) @@ -156,7 +154,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals("bason", result[2]['name']) assertEquals("[79, 85, 97]", result[2]['scores']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE array_contains(hobbies, 'dancing')" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE array_contains(hobbies, 'dancing')" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals('["singing", "dancing"]', result[0]['hobbies']) @@ -165,7 +163,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals("bason", result[2]['name']) assertEquals('["singing", "dancing"]', result[2]['hobbies']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE array_contains(evaluation, 'bason is very clever')" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE array_contains(evaluation, 'bason is very clever')" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals('["bason is very clever", "bason is very healthy"]', result[0]['evaluation']) @@ -338,7 +336,5 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { if 
(has_update_be_config) { set_be_config.call("inverted_index_compaction_enable", invertedIndexCompactionEnable.toString()) } - sql "SET global enable_match_without_inverted_index = true" } - } diff --git a/regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy b/regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy new file mode 100644 index 00000000000000..fd250a7d4fd528 --- /dev/null +++ b/regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy @@ -0,0 +1,144 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_inverted_index_cache", "nonConcurrent") { + // define a sql table + def indexTbName = "test_inverted_index_cache" + + sql "DROP TABLE IF EXISTS ${indexTbName}" + sql """ + CREATE TABLE ${indexTbName} ( + `@timestamp` int(11) NULL COMMENT "", + `clientip` varchar(20) NULL COMMENT "", + `request` text NULL COMMENT "", + `status` int(11) NULL COMMENT "", + `size` int(11) NULL COMMENT "", + INDEX request_idx (`request`) USING INVERTED PROPERTIES("parser" = "english", "support_phrase" = "true") COMMENT '', + ) ENGINE=OLAP + DUPLICATE KEY(`@timestamp`) + COMMENT "OLAP" + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "disable_auto_compaction" = "true" + ); + """ + + def load_httplogs_data = {table_name, label, read_flag, format_flag, file_name, ignore_failure=false, + expected_succ_rows = -1, load_to_single_tablet = 'true' -> + + // load the json data + streamLoad { + table "${table_name}" + + // set http request header params + set 'label', label + "_" + UUID.randomUUID().toString() + set 'read_json_by_line', read_flag + set 'format', format_flag + file file_name // import json file + time 10000 // limit inflight 10s + if (expected_succ_rows >= 0) { + set 'max_filter_ratio', '1' + } + + // if declared a check callback, the default check condition will ignore. 
+ // So you must check all condition + check { result, exception, startTime, endTime -> + if (ignore_failure && expected_succ_rows < 0) { return } + if (exception != null) { + throw exception + } + log.info("Stream load result: ${result}".toString()) + def json = parseJson(result) + } + } + } + + load_httplogs_data.call(indexTbName, 'test_index_inlist_fault_injection', 'true', 'json', 'documents-1000.json') + sql "sync" + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + + // query cache hit + // searcher cache hit + try { + sql """ set enable_inverted_index_query_cache = true """ + sql """ set enable_inverted_index_searcher_cache = true """ + + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + } + + // query cache miss + // searcher cache hit + try { + sql """ set enable_inverted_index_query_cache = false """ + sql """ set enable_inverted_index_searcher_cache = true """ + + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + } + + // query cache hit + // searcher cache miss + try { + sql """ set enable_inverted_index_query_cache = true """ + sql """ set enable_inverted_index_searcher_cache = false """ 
+ + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + qt_sql """ select count() from ${indexTbName} where (request match 'english'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + } + + // query cache miss + // searcher cache miss + try { + sql """ set enable_inverted_index_query_cache = false """ + sql """ set enable_inverted_index_searcher_cache = false """ + + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + qt_sql """ select count() from ${indexTbName} where (request match 'english'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + } + + sql """ set enable_inverted_index_query_cache = true """ + sql """ set enable_inverted_index_searcher_cache = true """ +} \ No newline at end of file diff --git a/regression-test/suites/index_p0/load.groovy b/regression-test/suites/index_p0/load.groovy index 174339f148300a..5416a5096329cb 100644 --- a/regression-test/suites/index_p0/load.groovy +++ b/regression-test/suites/index_p0/load.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-suite("test_bitmap_index_load") { +suite("load") { def tbName = "test_decimal_bitmap_index_multi_page" sql """ diff --git a/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy b/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy index 054add11d9f3a3..166d329c455511 100644 --- a/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy +++ b/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy @@ -241,7 +241,7 @@ suite("insert_group_commit_with_exception") { assertTrue(false) } catch (Exception e) { logger.info("exception : " + e) - assertTrue(e.getMessage().contains("insert into cols should be corresponding to the query output")) + assertTrue(e.getMessage().contains("Column count doesn't match value count")) } } getRowCount(14) diff --git a/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy b/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy index ea7dd0b595f504..82389d84e3cd67 100644 --- a/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy +++ b/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy @@ -19,9 +19,11 @@ suite("test_inverted_index_v3", "p0"){ def indexTbName1 = "test_inverted_index_v3_1" def indexTbName2 = "test_inverted_index_v3_2" + def indexTbName3 = "test_inverted_index_v3_3" sql "DROP TABLE IF EXISTS ${indexTbName1}" sql "DROP TABLE IF EXISTS ${indexTbName2}" + sql "DROP TABLE IF EXISTS ${indexTbName3}" sql """ CREATE TABLE ${indexTbName1} ( @@ -59,6 +61,24 @@ suite("test_inverted_index_v3", "p0"){ ); """ + sql """ + CREATE TABLE ${indexTbName3} ( + `@timestamp` int(11) NULL COMMENT "", + `clientip` varchar(20) NULL COMMENT "", + `request` text NULL COMMENT "", + `status` int(11) NULL COMMENT "", + `size` int(11) NULL COMMENT "", + INDEX request_idx (`request`) USING INVERTED PROPERTIES("parser" = "english", "support_phrase" = "true", "dict_compression" = "true") COMMENT '' + ) 
ENGINE=OLAP + DUPLICATE KEY(`@timestamp`) + COMMENT "OLAP" + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "inverted_index_storage_format" = "V3" + ); + """ + def load_httplogs_data = {table_name, label, read_flag, format_flag, file_name, ignore_failure=false, expected_succ_rows = -1, load_to_single_tablet = 'true' -> @@ -99,6 +119,7 @@ suite("test_inverted_index_v3", "p0"){ try { load_httplogs_data.call(indexTbName1, indexTbName1, 'true', 'json', 'documents-1000.json') load_httplogs_data.call(indexTbName2, indexTbName2, 'true', 'json', 'documents-1000.json') + load_httplogs_data.call(indexTbName3, indexTbName3, 'true', 'json', 'documents-1000.json') sql "sync" @@ -112,6 +133,11 @@ suite("test_inverted_index_v3", "p0"){ qt_sql """ select count() from ${indexTbName2} where request match_phrase 'hm bg'; """ qt_sql """ select count() from ${indexTbName2} where request match_phrase_prefix 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_any 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_all 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_phrase 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_phrase_prefix 'hm bg'; """ + } finally { } } \ No newline at end of file diff --git a/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy b/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy index 59cf1173acb46b..aee80d8d1693a4 100644 --- a/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy @@ -83,6 +83,7 @@ suite("test_iceberg_mtmv", "p0,external,iceberg,external_docker,external_docker_ String icebergDb = "iceberg_mtmv_partition" String icebergTable1 = "tstable" String icebergTable2 = "dtable" + String icebergTable3 = "union_test" sql """drop catalog if exists ${catalog_name} """ sql """create catalog if not exists 
${catalog_name} properties ( 'type'='iceberg', @@ -210,6 +211,61 @@ suite("test_iceberg_mtmv", "p0,external,iceberg,external_docker,external_docker_ sql """drop materialized view if exists ${mvName2};""" sql """drop table if exists ${catalog_name}.${icebergDb}.${icebergTable2}""" + // Test rewrite and union partitions + sql """set materialized_view_rewrite_enable_contain_external_table=true;""" + String mvSql = "SELECT par,count(*) as num FROM ${catalog_name}.${icebergDb}.${icebergTable3} group by par" + String mvName = "union_mv" + sql """drop table if exists ${catalog_name}.${icebergDb}.${icebergTable3}""" + sql """ + CREATE TABLE ${catalog_name}.${icebergDb}.${icebergTable3} ( + id int, + value int, + par datetime + ) ENGINE=iceberg + PARTITION BY LIST (day(par)) (); + """ + sql """insert into ${catalog_name}.${icebergDb}.${icebergTable3} values (1, 1, "2024-01-01"), (2, 1, "2024-01-01"), (3, 1, "2024-01-01"), (4, 1, "2024-01-01")""" + sql """insert into ${catalog_name}.${icebergDb}.${icebergTable3} values (1, 2, "2024-01-02"), (2, 2, "2024-01-02"), (3, 2, "2024-01-02")""" + sql """analyze table ${catalog_name}.${icebergDb}.${icebergTable3} with sync""" + + sql """drop materialized view if exists ${mvName};""" + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`par`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS ${mvSql} + """ + + def showPartitions = sql """show partitions from ${mvName}""" + logger.info("showPartitions: " + showPartitions.toString()) + assertTrue(showPartitions.toString().contains("p_20240101000000_20240102000000")) + assertTrue(showPartitions.toString().contains("p_20240102000000_20240103000000")) + + // refresh one partiton + sql """REFRESH MATERIALIZED VIEW ${mvName} partitions(p_20240101000000_20240102000000);""" + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_one_partition "SELECT * FROM ${mvName} " + def explainOnePartition = sql """ explain 
${mvSql} """ + logger.info("explainOnePartition: " + explainOnePartition.toString()) + assertTrue(explainOnePartition.toString().contains("VUNION")) + order_qt_refresh_one_partition_rewrite "${mvSql}" + mv_rewrite_success("${mvSql}", "${mvName}") + + //refresh auto + sql """REFRESH MATERIALIZED VIEW ${mvName} auto""" + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_auto "SELECT * FROM ${mvName} " + def explainAllPartition = sql """ explain ${mvSql}; """ + logger.info("explainAllPartition: " + explainAllPartition.toString()) + assertTrue(explainAllPartition.toString().contains("VOlapScanNode")) + order_qt_refresh_all_partition_rewrite "${mvSql}" + mv_rewrite_success("${mvSql}", "${mvName}") + + sql """drop materialized view if exists ${mvName};""" + sql """drop table if exists ${catalog_name}.${icebergDb}.${icebergTable3}""" + sql """ drop catalog if exists ${catalog_name} """ } } diff --git a/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy b/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy index f2989edbf6dfd6..48d63e03ec3db5 100644 --- a/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy @@ -25,6 +25,24 @@ suite("test_paimon_mtmv", "p0,external,mtmv,external_docker,external_docker_dori String catalogName = "${suiteName}_catalog" String mvName = "${suiteName}_mv" String dbName = context.config.getDbNameByFile(context.file) + String otherDbName = "${suiteName}_otherdb" + String tableName = "${suiteName}_table" + + sql """drop database if exists ${otherDbName}""" + sql """create database ${otherDbName}""" + sql """ + CREATE TABLE ${otherDbName}.${tableName} ( + `user_id` INT, + `num` INT + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`) + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + + sql """ + insert into ${otherDbName}.${tableName} values(1,2); + """ String minio_port = context.config.otherConfigs.get("iceberg_minio_port") String 
externalEnvIp = context.config.otherConfigs.get("externalEnvIp") @@ -99,8 +117,10 @@ suite("test_paimon_mtmv", "p0,external,mtmv,external_docker,external_docker_dori sql """ CREATE MATERIALIZED VIEW ${mvName} BUILD DEFERRED REFRESH AUTO ON MANUAL - DISTRIBUTED BY RANDOM BUCKETS 2 - PROPERTIES ('replication_num' = '1') + KEY(`id`) + COMMENT "comment1" + DISTRIBUTED BY HASH(`id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1',"grace_period"="333") AS SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format; """ @@ -113,6 +133,137 @@ suite("test_paimon_mtmv", "p0,external,mtmv,external_docker,external_docker_dori order_qt_not_partition "SELECT * FROM ${mvName} " order_qt_not_partition_after "select SyncWithBaseTables from mv_infos('database'='${dbName}') where Name='${mvName}'" sql """drop materialized view if exists ${mvName};""" + + // refresh on schedule + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD IMMEDIATE REFRESH COMPLETE ON SCHEDULE EVERY 10 SECOND STARTS "9999-12-13 21:07:09" + KEY(`id`) + COMMENT "comment1" + DISTRIBUTED BY HASH(`id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1',"grace_period"="333") + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format; + """ + waitingMTMVTaskFinishedByMvName(mvName) + sql """drop materialized view if exists ${mvName};""" + + // refresh on schedule + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD IMMEDIATE REFRESH AUTO ON commit + KEY(`id`) + COMMENT "comment1" + DISTRIBUTED BY HASH(`id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1',"grace_period"="333") + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format; + """ + waitingMTMVTaskFinishedByMvName(mvName) + sql """drop materialized view if exists ${mvName};""" + + // cross db and join internal table + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`par`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM 
${catalogName}.`test_paimon_spark`.test_tb_mix_format a left join internal.${otherDbName}.${tableName} b on a.id=b.user_id; + """ + def showJoinPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showJoinPartitionsResult: " + showJoinPartitionsResult.toString()) + assertTrue(showJoinPartitionsResult.toString().contains("p_a")) + assertTrue(showJoinPartitionsResult.toString().contains("p_b")) + + sql """ + REFRESH MATERIALIZED VIEW ${mvName} partitions(p_a); + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_join_one_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`create_date`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.two_partition; + """ + def showTwoPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showTwoPartitionsResult: " + showTwoPartitionsResult.toString()) + assertTrue(showTwoPartitionsResult.toString().contains("p_20200101")) + assertTrue(showTwoPartitionsResult.toString().contains("p_20380101")) + assertTrue(showTwoPartitionsResult.toString().contains("p_20380102")) + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto; + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_two_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`create_date`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1','partition_sync_limit'='2','partition_date_format'='%Y-%m-%d', + 'partition_sync_time_unit'='MONTH') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.two_partition; + """ + def showLimitPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showLimitPartitionsResult: " + 
showLimitPartitionsResult.toString()) + assertFalse(showLimitPartitionsResult.toString().contains("p_20200101")) + assertTrue(showLimitPartitionsResult.toString().contains("p_20380101")) + assertTrue(showLimitPartitionsResult.toString().contains("p_20380102")) + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto; + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_limit_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + + // not allow date trunc + test { + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by (date_trunc(`create_date`,'month')) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1','partition_sync_limit'='2','partition_date_format'='%Y-%m-%d', + 'partition_sync_time_unit'='MONTH') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.two_partition; + """ + exception "only support" + } + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`region`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.null_partition; + """ + def showNullPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showNullPartitionsResult: " + showNullPartitionsResult.toString()) + assertTrue(showNullPartitionsResult.toString().contains("p_null")) + assertTrue(showNullPartitionsResult.toString().contains("p_NULL")) + assertTrue(showNullPartitionsResult.toString().contains("p_bj")) + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto; + """ + waitingMTMVTaskFinishedByMvName(mvName) + // Will lose null data + order_qt_null_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + sql """drop catalog if exists ${catalogName}""" } diff --git a/regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy b/regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy new 
file mode 100644 index 00000000000000..a3ac1c048d30da --- /dev/null +++ b/regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy @@ -0,0 +1,115 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_paimon_olap_rewrite_mtmv", "p0,external,mtmv,external_docker,external_docker_doris") { + String enabled = context.config.otherConfigs.get("enablePaimonTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disabled paimon test") + return + } + String suiteName = "test_paimon_olap_rewrite_mtmv" + String catalogName = "${suiteName}_catalog" + String mvName = "${suiteName}_mv" + String dbName = context.config.getDbNameByFile(context.file) + String tableName = "${suiteName}_table" + sql """drop table if exists ${tableName}""" + sql """ + CREATE TABLE ${tableName} ( + `user_id` INT, + `num` INT + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`) + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + sql """ + insert into ${tableName} values(1,2); + """ + + sql """analyze table internal.`${dbName}`. ${tableName} with sync""" + sql """alter table internal.`${dbName}`. 
${tableName} modify column user_id set stats ('row_count'='1');""" + + String minio_port = context.config.otherConfigs.get("iceberg_minio_port") + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + + sql """set materialized_view_rewrite_enable_contain_external_table=true;""" + String mvSql = "SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format a left join ${tableName} b on a.id=b.user_id;"; + + sql """drop catalog if exists ${catalogName}""" + sql """CREATE CATALOG ${catalogName} PROPERTIES ( + 'type'='paimon', + 'warehouse' = 's3://warehouse/wh/', + "s3.access_key" = "admin", + "s3.secret_key" = "password", + "s3.endpoint" = "http://${externalEnvIp}:${minio_port}", + "s3.region" = "us-east-1" + );""" + + sql """analyze table ${catalogName}.`test_paimon_spark`.test_tb_mix_format with sync""" + sql """alter table ${catalogName}.`test_paimon_spark`.test_tb_mix_format modify column par set stats ('row_count'='20');""" + + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`par`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + ${mvSql} + """ + def showPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showPartitionsResult: " + showPartitionsResult.toString()) + assertTrue(showPartitionsResult.toString().contains("p_a")) + assertTrue(showPartitionsResult.toString().contains("p_b")) + + // refresh one partitions + sql """ + REFRESH MATERIALIZED VIEW ${mvName} partitions(p_a); + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_one_partition "SELECT * FROM ${mvName} " + + def explainOnePartition = sql """ explain ${mvSql} """ + logger.info("explainOnePartition: " + explainOnePartition.toString()) + assertTrue(explainOnePartition.toString().contains("VUNION")) + order_qt_refresh_one_partition_rewrite "${mvSql}" + + mv_rewrite_success("${mvSql}", "${mvName}") + 
+ // select p_b should not rewrite + mv_rewrite_fail("SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format a left join ${tableName} b on a.id=b.user_id where a.par='b';", "${mvName}") + + //refresh auto + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_auto "SELECT * FROM ${mvName} " + + def explainAllPartition = sql """ explain ${mvSql}; """ + logger.info("explainAllPartition: " + explainAllPartition.toString()) + assertTrue(explainAllPartition.toString().contains("VOlapScanNode")) + order_qt_refresh_all_partition_rewrite "${mvSql}" + + mv_rewrite_success("${mvSql}", "${mvName}") + + sql """drop materialized view if exists ${mvName};""" + sql """drop catalog if exists ${catalogName}""" +} + diff --git a/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy b/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy index 985443875c7b26..22a94d46635169 100644 --- a/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy @@ -75,6 +75,9 @@ suite("test_paimon_rewrite_mtmv", "p0,external,mtmv,external_docker,external_doc mv_rewrite_success("${mvSql}", "${mvName}") + // select p_b should not rewrite + mv_rewrite_fail("SELECT par,count(*) as num FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format where par='b' group by par;", "${mvName}") + //refresh auto sql """ REFRESH MATERIALIZED VIEW ${mvName} auto diff --git a/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy b/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy index 8e084091f4d15a..21296fc5878874 100644 --- a/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy @@ -113,20 +113,17 @@ suite("test_partition_refresh_mtmv") { PROPERTIES ('replication_num' = '1') ; """ - try { - sql """ - CREATE MATERIALIZED VIEW ${mvName} - 
BUILD DEFERRED REFRESH AUTO ON MANUAL - partition by(`date`) - DISTRIBUTED BY RANDOM BUCKETS 2 - PROPERTIES ('replication_num' = '1') - AS - SELECT * FROM ${tableNameNum}; - """ - Assert.fail(); - } catch (Exception e) { - log.info(e.getMessage()) - } + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`date`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${tableNameNum}; + """ + sql """drop table if exists `${tableNameNum}`""" sql """drop materialized view if exists ${mvName};""" diff --git a/regression-test/suites/mv_p0/unique/unique_rewrite.groovy b/regression-test/suites/mv_p0/unique/unique_rewrite.groovy index e8c3dd05f80c92..1e8a37c70919ba 100644 --- a/regression-test/suites/mv_p0/unique/unique_rewrite.groovy +++ b/regression-test/suites/mv_p0/unique/unique_rewrite.groovy @@ -96,6 +96,10 @@ suite("mv_on_unique_table") { AS ${mv1} """) + + def desc_all_mv1 = sql """desc lineitem_2_uniq all;""" + logger.info("desc mv1 is: " + desc_all_mv1.toString()) + explain { sql("""${query1}""") check {result -> @@ -124,6 +128,11 @@ suite("mv_on_unique_table") { AS ${mv2} """) + + def desc_all_mv2 = sql """desc lineitem_2_uniq all;""" + logger.info("desc mv2 is" + desc_all_mv2) + // If exec on fe follower, wait meta data is ready on follower + Thread.sleep(2000) explain { sql("""${query2}""") check {result -> diff --git a/regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy b/regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy new file mode 100644 index 00000000000000..70e0f3403e5855 --- /dev/null +++ b/regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy @@ -0,0 +1,79 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("use_command_nereids") { + String db1 = "test_use_command_db1" + String db2 = "test_use_command_db2" + String tbl1 = "tb1" + String tbl2 = "tb2" + + sql """drop database if exists ${db1};""" + sql """drop database if exists ${db2};""" + // create database + sql """create database ${db1};""" + sql """create database ${db2};""" + //cloud-mode + if (isCloudMode()) { + return + } + // use command + checkNereidsExecute("use ${db1};") + + sql """drop table if exists ${tbl1};""" + sql """ create table ${db1}.${tbl1} + ( + c1 bigint, + c2 bigint + ) + ENGINE=OLAP + DUPLICATE KEY(c1, c2) + COMMENT 'OLAP' + DISTRIBUTED BY HASH(c1) BUCKETS 1 + PROPERTIES ( + "replication_num" = "1" + ); + """ + qt_show_tables_db1 """show tables;""" + + checkNereidsExecute("use ${db2};") + sql """drop table if exists ${tbl2};""" + sql """ create table ${db2}.${tbl2} + ( + c1 bigint, + c2 bigint + ) + ENGINE=OLAP + DUPLICATE KEY(c1, c2) + COMMENT 'OLAP' + DISTRIBUTED BY HASH(c1) BUCKETS 1 + PROPERTIES ( + "replication_num" = "1" + ); + """ + + qt_show_tables_db2 """show tables;""" + + checkNereidsExecute("use internal.${db1};") + qt_show_tables_db1 """show tables;""" + checkNereidsExecute("use internal.${db2};") + qt_show_tables_db2 """show tables;""" + + sql """drop table if exists ${db1}.${tbl1};""" + sql """drop table if exists ${db2}.${tbl2};""" + sql """drop database if exists ${db1};""" + sql """drop 
database if exists ${db2};""" +} \ No newline at end of file diff --git a/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy b/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy index 282a28a903e4a0..53b7385b1535df 100644 --- a/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy +++ b/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy @@ -14,6 +14,7 @@ // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. + suite("test_date_or_datetime_computation_negative") { sql """ CREATE TABLE IF NOT EXISTS test_date_or_datetime_computation_negative ( `row_id` LARGEINT NOT NULL, @@ -50,8 +51,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 year) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 year), date_sub(dateV2_null, interval 1 year), date_sub(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_1 """SELECT date_sub(date_null, interval 1 year), date_sub(dateV2_null, interval 1 year), date_sub(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_sub(date, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -65,8 +69,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 month) FROM test_date_or_datetime_computation_negative 
WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 month), date_sub(dateV2_null, interval 1 month), date_sub(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_2 """SELECT date_sub(date_null, interval 1 month), date_sub(dateV2_null, interval 1 month), date_sub(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """ SELECT date_sub(date, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -80,10 +87,12 @@ suite("test_date_or_datetime_computation_negative") { sql """ SELECT date_sub(datetime, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=1; """ check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 week), date_sub(dateV2_null, interval 1 week), date_sub(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_3 """SELECT date_sub(date_null, interval 1 week), date_sub(dateV2_null, interval 1 week), date_sub(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_sub(date, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> @@ -96,10 +105,12 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, 
interval 1 day), date_sub(dateV2_null, interval 1 day), date_sub(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_4 """SELECT date_sub(date_null, interval 1 day), date_sub(dateV2_null, interval 1 day), date_sub(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_sub(date, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> @@ -112,8 +123,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """ SELECT date_sub(date_null, interval 1 hour), date_sub(dateV2_null, interval 1 hour), date_sub(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_5 """ SELECT date_sub(date_null, interval 1 hour), date_sub(dateV2_null, interval 1 hour), date_sub(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_sub(date, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -127,8 +141,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 minute), date_sub(dateV2_null, interval 1 minute), date_sub(datetime_null, interval 1 minute) FROM 
test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_6 """SELECT date_sub(date_null, interval 1 minute), date_sub(dateV2_null, interval 1 minute), date_sub(datetime_null, interval 1 minute) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_sub(date, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -142,8 +159,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 second), date_sub(dateV2_null, interval 1 second), date_sub(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_7 """SELECT date_sub(date_null, interval 1 second), date_sub(dateV2_null, interval 1 second), date_sub(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { @@ -158,8 +178,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 year) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 year), date_add(dateV2_null, interval 1 year), date_add(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_8 """SELECT date_add(date_null, interval 1 year), date_add(dateV2_null, interval 1 year), date_add(datetime_null, 
interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_add(date, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -173,8 +196,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 month), date_add(dateV2_null, interval 1 month), date_add(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_9 """SELECT date_add(date_null, interval 1 month), date_add(dateV2_null, interval 1 month), date_add(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """ SELECT date_add(date, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -188,10 +214,12 @@ suite("test_date_or_datetime_computation_negative") { sql """ SELECT date_add(datetime, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=3; """ check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 week), date_add(dateV2_null, interval 1 week), date_add(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_10 """SELECT date_add(date_null, interval 1 week), date_add(dateV2_null, interval 1 week), date_add(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_add(date, interval 1 day) FROM 
test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> @@ -204,10 +232,12 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 day), date_add(dateV2_null, interval 1 day), date_add(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_11 """SELECT date_add(date_null, interval 1 day), date_add(dateV2_null, interval 1 day), date_add(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_add(date, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> @@ -220,8 +250,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """ SELECT date_add(date_null, interval 1 hour), date_add(dateV2_null, interval 1 hour), date_add(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_12 """ SELECT date_add(date_null, interval 1 hour), date_add(dateV2_null, interval 1 hour), date_add(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_add(date, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -235,8 +268,11 @@ 
suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 minute), date_add(dateV2_null, interval 1 minute), date_add(datetime_null, interval 1 minute) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_13 """SELECT date_add(date_null, interval 1 minute), date_add(dateV2_null, interval 1 minute), date_add(datetime_null, interval 1 minute) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_add(date, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -250,8 +286,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 second), date_add(dateV2_null, interval 1 second), date_add(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_14 """SELECT date_add(date_null, interval 1 second), date_add(dateV2_null, interval 1 second), date_add(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" // TODO: // negative test for microseconds_add/milliseconds_add/seconds_add/minutes_add/hours_add/days_add/weeks_add/months_add/years_add @@ -268,8 +307,9 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT hours_add(datetime, 24) FROM test_date_or_datetime_computation_negative
WHERE row_id = 3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT hours_add(date_null, 24), hours_add(dateV2_null, 24), hours_add(datetime_null, 24) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_15 """SELECT hours_add(date_null, 24), hours_add(dateV2_null, 24), hours_add(datetime_null, 24) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - - sql "DROP TABLE test_date_or_datetime_computation_negative" } diff --git a/regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy b/regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy new file mode 100644 index 00000000000000..af975aeeaa22e7 --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy @@ -0,0 +1,170 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// TODO: date/datetime comparison still has a bug; needs fixing.
+suite('test_simplify_comparison_predicate', 'nonConcurrent') { + def tbl = 'test_simplify_comparison_predicate_tbl' + def checkExplain = { expression, resExpression -> + def checker = { explainString, exception, startTime, endTime -> + assertNull(exception) + def foundOutputExprs = false + def succ = false + for (def line : explainString.split('\n')) { + if (foundOutputExprs) { + assertTrue(line.contains(resExpression), "'${line}' no contains '${resExpression}'") + succ = true + break + } + if (line.contains('OUTPUT EXPRS:')) { + foundOutputExprs = true + } + } + assertTrue(foundOutputExprs) + assertTrue(succ) + } + + explain { + sql "SELECT ${expression} FROM ${tbl}" + check checker + } + } + def testSimplify = { checkNullColumn, checkNotNullColumn, expression, resExpression -> + def types = [''] + def column = '' + if (expression.contains('{int_like_column}')) { + column = '{int_like_column}' + types = ['tinyint', 'smallint', 'int', 'bigint'] + } else if (expression.contains('{decimal_column}')) { + column = '{decimal_column}' + types = ['decimal_3_0', 'decimal_5_2'] + } else if (expression.contains('{date_column}')) { + column = '{date_column}' + types = ['date', 'datev1'] + } else if (expression.contains('{datetime_column}')) { + column = '{datetime_column}' + types = ['datetime_0', 'datetime_3', 'datetimev1'] + } + for (def type : types) { + if (type == '') { + checkExplain expression, resExpression + } else { + if (checkNullColumn) { + checkExplain expression.replace(column, "c_${type}_null"), resExpression.replace(column, "c_${type}_null") + } + if (checkNotNullColumn) { + checkExplain expression.replace(column, "c_${type}"), resExpression.replace(column, "c_${type}") + } + } + } + } + + setFeConfigTemporary([disable_datev1:false, disable_decimalv2:false]) { + sql """ + DROP TABLE IF EXISTS ${tbl} FORCE; + + CREATE TABLE ${tbl} ( + c_tinyint tinyint not null default 1, + c_tinyint_null tinyint, + c_smallint smallint not null default 1, + c_smallint_null 
smallint, + c_int int not null default 1, + c_int_null int, + c_bigint bigint not null default 1, + c_bigint_null bigint, + c_decimal_3_0 decimal(3, 0) not null default 1, + c_decimal_3_0_null decimal(3, 0), + c_decimal_5_2 decimal(5, 2) not null default 1, + c_decimal_5_2_null decimal(5, 2), + c_date date not null default '2000-01-01', + c_date_null date, + c_datev1 datev1 not null default '2000-01-01', + c_datev1_null datev1 null, + c_datetime_0 datetime(0) not null default '2000-01-01 00:00:00', + c_datetime_0_null datetime(0), + c_datetime_3 datetime(3) not null default '2000-01-01 00:00:00', + c_datetime_3_null datetime(3), + c_datetimev1 datetimev1 not null default '2000-01-01 00:00:00', + c_datetimev1_null datetimev1 + ) + PROPERTIES ('replication_num' = '1'); + + INSERT INTO ${tbl} VALUES(); + """ + + testSimplify true, true, '{int_like_column} = CAST(1.00 as DOUBLE)', '({int_like_column} = 1)' + testSimplify true, false, '{int_like_column} = CAST(1.01 as DOUBLE)', 'AND[{int_like_column} IS NULL,NULL]' + testSimplify false, true, '{int_like_column} = CAST(1.01 as DOUBLE)', 'FALSE' + testSimplify true, true, '{int_like_column} <=> CAST(1.01 as DOUBLE)', 'FALSE' + testSimplify true, true, '{int_like_column} > CAST(1.00 as DOUBLE)', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} < CAST(1.00 as DOUBLE)', '({int_like_column} < 1)' + testSimplify true, true, '{int_like_column} > CAST(1.01 as DOUBLE)', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} >= CAST(1.01 as DOUBLE)', '({int_like_column} >= 2)' + testSimplify true, true, '{int_like_column} <= CAST(1.01 as DOUBLE)', '({int_like_column} <= 1)' + testSimplify true, true, '{int_like_column} < CAST(1.01 as DOUBLE)', '({int_like_column} < 2)' + testSimplify true, true, '{int_like_column} = 1.00', '({int_like_column} = 1)' + testSimplify true, true, '{int_like_column} > 1.00', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} < 1.00', 
'({int_like_column} < 1)' + testSimplify true, false, '{int_like_column} = 1.01', 'AND[{int_like_column} IS NULL,NULL]' + testSimplify false, true, '{int_like_column} = 1.01', 'FALSE' + testSimplify true, true, '{int_like_column} <=> 1.01', 'FALSE' + testSimplify true, true, '{int_like_column} > 1.01', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} >= 1.01', '({int_like_column} >= 2)' + testSimplify true, true, '{int_like_column} <= 1.01', '({int_like_column} <= 1)' + testSimplify true, true, '{int_like_column} < 1.01', '({int_like_column} < 2)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) = CAST(1.00 as DECIMAL(10, 5))', '(c_decimal_3_0_null = 1)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) = CAST(1.1 as DECIMAL(10, 5))', 'AND[c_decimal_3_0_null IS NULL,NULL]' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) > CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null > 1)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) >= CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null >= 2)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) < CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null < 2)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) <= CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null <= 1)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.0 as DECIMAL(10, 5))', '(c_decimal_5_2_null = 1.00)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_5_2_null = 1.10)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.12 as DECIMAL(10, 5))', '(c_decimal_5_2_null = 1.12)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.123 as DECIMAL(10, 5))', 'AND[c_decimal_5_2_null IS NULL,NULL]' + testSimplify false, false, 'c_decimal_5_2 = CAST(1.123 as DECIMAL(10, 5))', 'FALSE' + testSimplify false, false, 'c_decimal_5_2_null > CAST(1.123 as 
DECIMAL(10, 5))', 'c_decimal_5_2_null > 1.12' + testSimplify false, false, 'c_decimal_5_2_null >= CAST(1.123 as DECIMAL(10, 5))', 'c_decimal_5_2_null >= 1.13' + testSimplify false, false, 'c_decimal_5_2_null <= CAST(1.123 as DECIMAL(10, 5))', 'c_decimal_5_2_null <= 1.12' + testSimplify false, false, 'c_decimal_5_2_null < CAST(1.123 as DECIMAL(10, 5))', 'c_decimal_5_2_null < 1.13' + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) = '2000-01-01'", "(c_datetime_0 = '2000-01-01 00:00:00')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) = '2000-01-01 00:00:00.1'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_0_null AS DATETIME(5)) = '2000-01-01 00:00:00.1'", 'AND[c_datetime_0_null IS NULL,NULL]' + testSimplify false, false, "CAST(c_datetime_0_null AS DATETIME(5)) <=> '2000-01-01 00:00:00.1'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) >= '2000-01-01 00:00:00.1'", "(c_datetime_0 >= '2000-01-01 00:00:01')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) > '2000-01-01 00:00:00.1'", "(c_datetime_0 > '2000-01-01 00:00:00')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) <= '2000-01-01 00:00:00.1'", "(c_datetime_0 <= '2000-01-01 00:00:00')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) < '2000-01-01 00:00:00.1'", "(c_datetime_0 < '2000-01-01 00:00:01')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) = '2000-01-01'", "(c_datetime_3 = '2000-01-01 00:00:00.000')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) = '2000-01-01 00:00:00.1234'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_3_null AS DATETIME(5)) = '2000-01-01 00:00:00.1234'", 'AND[c_datetime_3_null IS NULL,NULL]' + testSimplify false, false, "CAST(c_datetime_3_null AS DATETIME(5)) <=> '2000-01-01 00:00:00.1234'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) >= '2000-01-01 00:00:00.1234'", "(c_datetime_3 >= '2000-01-01 
00:00:00.124')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) > '2000-01-01 00:00:00.1234'", "(c_datetime_3 > '2000-01-01 00:00:00.123')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) <= '2000-01-01 00:00:00.1234'", "(c_datetime_3 <= '2000-01-01 00:00:00.123')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) < '2000-01-01 00:00:00.1234'", "(c_datetime_3 < '2000-01-01 00:00:00.124')" + testSimplify false, false, "c_date = '2000-01-01 00:00:01'", 'FALSE' + testSimplify false, false, "CAST(c_date_null AS DATETIME(5)) = '2000-01-01 00:00:01'", 'AND[c_date_null IS NULL,NULL]' + testSimplify false, false, "CAST(c_date_null AS DATETIME(5)) <=> '2000-01-01 00:00:01'", 'FALSE' + testSimplify false, false, "CAST(c_date AS DATETIME(5)) > '2000-01-01 00:00:01'", "c_date > '2000-01-01'" + testSimplify false, false, "CAST(c_date AS DATETIME(5)) >= '2000-01-01 00:00:01'", "c_date >= '2000-01-02'" + testSimplify false, false, "CAST(c_date AS DATETIME(5)) <= '2000-01-01 00:00:01'", "c_date <= '2000-01-01'" + testSimplify false, false, "CAST(c_date AS DATETIME(5)) < '2000-01-01 00:00:01'", "c_date < '2000-01-02'" + + sql "DROP TABLE IF EXISTS ${tbl} FORCE" + } +} diff --git a/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy b/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy index 88d8ad6ea2d84d..35d4a60e6255bb 100644 --- a/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy @@ -169,7 +169,7 @@ suite("mtmv_range_date_part_up_rewrite") { for (int i = 0; i < mv_name_list.size(); i++) { def job_name = getJobName(db, mv_name_list[i]) waitingMTMVTaskFinished(job_name) - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) 
compare_res(query_stmt_list[i] + " order by 1,2,3") } @@ -178,38 +178,38 @@ suite("mtmv_range_date_part_up_rewrite") { sql """insert into lineitem_range_date_union values (1, null, 3, 1, 5.5, 6.5, 7.5, 8.5, 'o', 'k', '2023-10-18', '2023-10-18', 'a', 'b', 'yyyyyyyyy', '2023-11-01')""" for (int i = 0; i < mv_name_list.size(); i++) { - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } for (int i = 0; i < mv_name_list.size(); i++) { sql """refresh MATERIALIZED VIEW ${mv_name_list[i]} auto;""" - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } sql """insert into lineitem_range_date_union values (2, null, 3, 1, 5.5, 6.5, 7.5, 8.5, 'o', 'k', '2023-10-18', '2023-10-18', 'a', 'b', 'yyyyyyyyy', '2023-11-01');""" for (int i = 0; i < mv_name_list.size(); i++) { - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } for (int i = 0; i < mv_name_list.size(); i++) { sql """refresh MATERIALIZED VIEW ${mv_name_list[i]} auto;""" - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } sql """ALTER TABLE lineitem_range_date_union DROP PARTITION IF EXISTS p4 FORCE""" for (int i = 0; i < mv_name_list.size(); i++) { - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } for (int i = 0; i < mv_name_list.size(); i++) { sql """refresh MATERIALIZED VIEW ${mv_name_list[i]} auto;""" - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) 
compare_res(query_stmt_list[i] + " order by 1,2,3") } diff --git a/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy b/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy index a615c7316bdb56..44fc259a71a1cc 100644 --- a/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy @@ -19,7 +19,7 @@ This suite is a two dimensional test case file. It mainly tests the inner join and filter positions. */ -suite("partition_mv_rewrite_dimension_2_2") { +suite("dimension_2_inner_join") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" diff --git a/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy b/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy index 15d93e32f65dc2..f973d031adeee0 100644 --- a/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy @@ -83,7 +83,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv1" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_delete_sign = false;""" @@ -99,7 +101,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv2" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_storage_engine_merge = false;""" @@ -115,7 +119,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ 
} catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv3: " + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_delete_bitmap = false;""" @@ -131,7 +137,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv4" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_delete_predicate = false;""" @@ -147,7 +155,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv5" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set show_hidden_columns = false;""" diff --git a/regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy b/regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy new file mode 100644 index 00000000000000..e7556094e71c10 --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy @@ -0,0 +1,77 @@ +package mv.nested +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("nested_mv_delete") { + + String db = context.config.getDbNameByFile(context.file) + sql "use ${db}" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" + + sql """ + drop table if exists orders_1 + """ + + sql """ + CREATE TABLE IF NOT EXISTS orders_1 ( + o_orderkey INTEGER NOT NULL, + o_custkey INTEGER NOT NULL, + o_orderstatus CHAR(1) NOT NULL, + o_totalprice DECIMALV3(15,2) NOT NULL, + o_orderdate DATE NOT NULL, + o_orderpriority CHAR(15) NOT NULL, + o_clerk CHAR(15) NOT NULL, + o_shippriority INTEGER NOT NULL, + o_comment VARCHAR(79) NOT NULL, + public_col INT NULL + ) + DUPLICATE KEY(o_orderkey, o_custkey) + DISTRIBUTED BY HASH(o_orderkey) BUCKETS 3 + PROPERTIES ( + "replication_num" = "1" + ); + """ + + sql """ + insert into orders_1 values + (1, 1, 'o', 9.5, '2023-12-08', 'a', 'b', 1, 'yy', 1), + (1, 1, 'o', 10.5, '2023-12-08', 'a', 'b', 1, 'yy', null), + (2, 1, 'o', 11.5, '2023-12-09', 'a', 'b', 1, 'yy', 2), + (3, 1, 'o', 12.5, '2023-12-10', 'a', 'b', 1, 'yy', null), + (3, 1, 'o', 33.5, '2023-12-10', 'a', 'b', 1, 'yy', 3), + (4, 2, 'o', 43.2, '2023-12-11', 'c','d',2, 'mm', null), + (5, 2, 'o', 56.2, '2023-12-12', 'c','d',2, 'mi', 4), + (5, 2, 'o', 1.2, '2023-12-12', 'c','d',2, 'mi', null); + """ + + sql """alter table orders_1 modify column o_comment set stats ('row_count'='8');""" + + + create_async_mv(db, "mv_level_1", """ + select * from orders_1; + """) + + create_async_mv(db, "mv_level_2", """ + select * from mv_level_1; + """) + + sql """drop materialized view 
mv_level_1;""" + + order_qt_query_after_delete "select * from mv_level_2" + sql """ DROP MATERIALIZED VIEW IF EXISTS mv_level_2""" +} diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy new file mode 100644 index 00000000000000..bda9dc81af7cfe --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy @@ -0,0 +1,407 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_add_sub_diff_ceil_floor") { + sql "set disable_nereids_rules='REWRITE_FILTER_EXPRESSION'" + sql "drop table if exists test_add_sub_diff_ceil_floor_t" + sql """create table test_add_sub_diff_ceil_floor_t (a int, dt datetime, d date, c varchar(100)) duplicate key(a) + partition by range(dt) ( + partition p1 values less than ("2017-01-01"), + partition p2 values less than ("2018-01-01"), + partition p3 values less than ("2019-01-01"), + partition p4 values less than ("2020-01-01"), + partition p5 values less than ("2021-01-01") + ) distributed by hash(a) properties("replication_num"="1");""" + sql """INSERT INTO test_add_sub_diff_ceil_floor_t SELECT number, + date_add('2016-01-01 00:00:00', interval number month), + cast(date_add('2022-01-01 00:00:00', interval number month) as date), cast(number as varchar(65533)) FROM numbers('number'='55');""" + sql "INSERT INTO test_add_sub_diff_ceil_floor_t values(3,null,null,null);" + + // xx_add + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_add(dt,1) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_add(dt,2) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_add(dt,10) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_add(dt,1) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_add(dt,2) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_add(dt,10) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_add(dt,2) >'2019-01-01' """ + 
contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_add(dt,10) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + // xx_sub + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_sub(dt,1) <='2018-01-01' """ + contains("4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_sub(dt,2) <='2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_sub(dt,10) <='2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_sub(dt,1) <='2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_sub(dt,2) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_sub(dt,10) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_sub(dt,2) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_sub(dt,10) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + + // xx_diff + // first arg is dt. 
positive + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff(dt,'2017-01-01') <2 """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + // second arg is dt. 
not positive + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',dt) <2 """ + contains("partitions=2/5 (p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',dt) <=2 """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff('2020-01-01',dt) >2 """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_diff('2020-01-01',dt) >=2 """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + + // xx_ceil + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_ceil(dt) <'2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * 
from test_add_sub_diff_ceil_floor_t where day_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minute_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where second_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + // xx_ceil with other args + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt,5) <'2019-01-01' """ + contains("partitions=1/5 (p1)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt,'2013-01-01') <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt,5,'2013-01-01') <'2019-01-01'""" + contains(" partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_ceil(dt,c) <'2019-01-01' """ + contains("partitions=5/5 (p1,p2,p3,p4,p5)") + } + + // xx_floor + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt) <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where day_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minute_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + 
} + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where second_floor(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + + // xx_floor with other args + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt,'2015-01-01') <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt,5,'2015-01-01') <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt,5) <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_floor(dt,c,'2015-01-01') <='2019-01-01' """ + contains("partitions=5/5 (p1,p2,p3,p4,p5)") + } + + // diff nest function + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1))) <=2 """ + contains("partitions=4/5 (p2,p3,p4,p5)") + } + explain { + sql "select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_sub(dt, 1))) <=2" + contains("partitions=4/5 (p1,p3,p4,p5)") + } + // mixed with non-function predicates + explain { + sql "select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_sub(dt, 1))) <=2 and dt>'2019-06-01'" + contains("partitions=2/5 (p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_sub(dt, 1))) <=2 and date_trunc(dt,'day')>'2019-06-01' """ + contains("partitions=2/5 (p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff(months_add(dt,10), '2018-01-01') =2 """ + contains("partitions=1/5 (p2)") + } + + // hours_add second arg is not literal, so will not do pruning + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_add(dt, 
years_diff(dt,'2018-01-01')) <'2018-01-01' """ + contains("partitions=5/5 (p1,p2,p3,p4,p5)") + } + + // max + sql "drop table if exists max_t" + sql """create table max_t (a int, dt datetime, d date, c varchar(100)) duplicate key(a) + partition by range(dt) ( + partition p1 values less than ("2017-01-01"), + partition p2 values less than ("2018-01-01"), + partition p3 values less than ("2019-01-01"), + partition p4 values less than ("2020-01-01"), + partition p5 values less than ("2021-01-01"), + partition p6 values less than MAXVALUE + ) distributed by hash(a) properties("replication_num"="1");""" + sql """INSERT INTO max_t SELECT number, + date_add('2016-01-01 00:00:00', interval number month), + cast(date_add('2022-01-01 00:00:00', interval number month) as date), cast(number as varchar(65533)) FROM numbers('number'='100');""" + sql "INSERT INTO max_t values(3,null,null,null);" + + explain { + sql "select * from max_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1),'1990-01-05')) <=2 ;" + contains("partitions=5/6 (p2,p3,p4,p5,p6)") + } + explain { + sql "select * from max_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1),10,'1990-01-05')) <=2 ;" + contains("partitions=5/6 (p2,p3,p4,p5,p6)") + } + + explain { + sql """select * from max_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1),10,'1990-01-05')) <=2 and dt >'2018-01-01';""" + contains("partitions=4/6 (p3,p4,p5,p6)") + } + + explain { + sql """select * from max_t where months_diff('2021-01-01',month_floor(hours_add(dt, 1),10,'1990-01-05')) <=2;""" + contains("partitions=3/6 (p1,p5,p6)") + } + + explain { + sql """select * from max_t where months_diff('2021-01-01',month_floor(hours_add(dt, 1),12,'1000-01-01')) > 2""" + contains("partitions=5/6 (p1,p2,p3,p4,p5)") + } + explain { + sql """select * from max_t where months_diff('2021-01-01',month_floor(hours_add(dt, 1),12,'1000-01-01')) > 2 and month_floor(dt) >'2018-01-01' """ + contains("partitions=3/6 (p3,p4,p5)") + } + explain { 
+ sql """select * from max_t where hours_sub(hours_add(dt, 1),1) >'2018-01-01' and days_diff(hours_sub(hours_add(dt, 1),1),'2021-01-01') >2""" + contains("partitions=1/6 (p6)") + } + + // from_days and unix_timestamp + explain { + sql """select * from max_t where unix_timestamp(dt) > 1547838847 """ + contains("partitions=3/6 (p4,p5,p6)") + } + + sql "drop table if exists partition_int_from_days" + sql """ + CREATE TABLE `partition_int_from_days` ( + `a` int NULL, + `b` int NULL + ) ENGINE=OLAP + DUPLICATE KEY(`a`, `b`) + PARTITION BY RANGE(`a`) + (PARTITION p1 VALUES [("-2147483648"), ("100000")), + PARTITION p2 VALUES [("100000"), ("738000")), + PARTITION p3 VALUES [("738000"), ("90000000")), + PARTITION p4 VALUES [("90000000"), (MAXVALUE))) + DISTRIBUTED BY HASH(`a`) BUCKETS 10 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1" + ); """ + sql """ + insert into partition_int_from_days values(100,100),(100022,1002),(738004,33),(90000003,89); + """ + explain { + sql """select * from partition_int_from_days where from_days(a)>'2020-07-29' """ + contains("partitions=3/4 (p1,p3,p4)") + } + + + sql "drop table if exists unix_time_t" + sql """create table unix_time_t (a int, dt datetime, d date, c varchar(100)) duplicate key(a) + partition by range(dt) ( + partition p1 values less than ("1980-01-01"), + partition p2 values less than ("2018-01-01"), + partition p3 values less than ("2039-01-01"), + partition p4 values less than MAXVALUE + ) distributed by hash(a) properties("replication_num"="1");""" + sql """INSERT INTO unix_time_t values(1,'1979-01-01','1979-01-01','abc'),(1,'2012-01-01','2012-01-01','abc'),(1,'2020-01-01','2020-01-01','abc'),(1,'2045-01-01','2045-01-01','abc')""" + sql "INSERT INTO unix_time_t values(3,null,null,null);" + explain { + sql """ select * from unix_time_t where unix_timestamp(dt) > 1514822400 """ + contains("partitions=2/4 (p3,p4)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) < 
2147454847;""" + contains("partitions=4/4 (p1,p2,p3,p4)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) = 2147454847""" + contains("partitions=2/4 (p3,p4)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) = 2147454847 and dt<'2038-01-01'""" + contains("partitions=1/4 (p3)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) <=0""" + contains("partitions=3/4 (p1,p3,p4)") + } + +} \ No newline at end of file diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy index c309d10d067194..3e033a78eb963c 100644 --- a/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy @@ -46,13 +46,13 @@ suite("test_convert_tz") { } explain { sql "SELECT * FROM test_convert_tz WHERE convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') > '2021-01-01';"; - contains("partitions=2/3 (p2,p3)") + contains("partitions=3/3 (p1,p2,p3)") } explain { sql """SELECT * FROM test_convert_tz WHERE convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') < '2021-02-24' and convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') > '2021-01-01';""" - contains("partitions=2/3 (p2,p3)") + contains("partitions=3/3 (p1,p2,p3)") } explain { @@ -93,7 +93,7 @@ suite("test_convert_tz") { } explain { sql "SELECT * FROM test_convert_tz WHERE not convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') <= '2021-01-01';"; - contains("partitions=2/3 (p2,p3)") + contains("partitions=3/3 (p1,p2,p3)") } } } \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/clickbench/load.groovy b/regression-test/suites/new_shapes_p0/clickbench/load.groovy deleted file mode 100644 index 8ebc0035d2a22b..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/load.groovy +++ /dev/null @@ -1,149 
+0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Most of the cases are copied from https://github.com/trinodb/trino/tree/master -// /testing/trino-product-tests/src/main/resources/sql-tests/testcases -// and modified by Doris. 
- -// syntax error: -// q06 q13 q15 -// Test 23 suites, failed 3 suites - -// Note: To filter out tables from sql files, use the following one-liner comamnd -// sed -nr 's/.*tables: (.*)$/\1/gp' /path/to/*.sql | sed -nr 's/,/\n/gp' | sort | uniq -suite("load") { - if (isCloudMode()) { - return - } - - sql """ - DROP TABLE IF EXISTS hits - """ - - sql """ - CREATE TABLE IF NOT EXISTS hits ( - CounterID INT NOT NULL, - EventDate Datev2 NOT NULL, - UserID BIGINT NOT NULL, - EventTime DateTimev2 NOT NULL, - WatchID BIGINT NOT NULL, - JavaEnable SMALLINT NOT NULL, - Title STRING NOT NULL, - GoodEvent SMALLINT NOT NULL, - ClientIP INT NOT NULL, - RegionID INT NOT NULL, - CounterClass SMALLINT NOT NULL, - OS SMALLINT NOT NULL, - UserAgent SMALLINT NOT NULL, - URL STRING NOT NULL, - Referer STRING NOT NULL, - IsRefresh SMALLINT NOT NULL, - RefererCategoryID SMALLINT NOT NULL, - RefererRegionID INT NOT NULL, - URLCategoryID SMALLINT NOT NULL, - URLRegionID INT NOT NULL, - ResolutionWidth SMALLINT NOT NULL, - ResolutionHeight SMALLINT NOT NULL, - ResolutionDepth SMALLINT NOT NULL, - FlashMajor SMALLINT NOT NULL, - FlashMinor SMALLINT NOT NULL, - FlashMinor2 STRING NOT NULL, - NetMajor SMALLINT NOT NULL, - NetMinor SMALLINT NOT NULL, - UserAgentMajor SMALLINT NOT NULL, - UserAgentMinor VARCHAR(255) NOT NULL, - CookieEnable SMALLINT NOT NULL, - JavascriptEnable SMALLINT NOT NULL, - IsMobile SMALLINT NOT NULL, - MobilePhone SMALLINT NOT NULL, - MobilePhoneModel STRING NOT NULL, - Params STRING NOT NULL, - IPNetworkID INT NOT NULL, - TraficSourceID SMALLINT NOT NULL, - SearchEngineID SMALLINT NOT NULL, - SearchPhrase STRING NOT NULL, - AdvEngineID SMALLINT NOT NULL, - IsArtifical SMALLINT NOT NULL, - WindowClientWidth SMALLINT NOT NULL, - WindowClientHeight SMALLINT NOT NULL, - ClientTimeZone SMALLINT NOT NULL, - ClientEventTime DateTimev2 NOT NULL, - SilverlightVersion1 SMALLINT NOT NULL, - SilverlightVersion2 SMALLINT NOT NULL, - SilverlightVersion3 INT NOT NULL, - 
SilverlightVersion4 SMALLINT NOT NULL, - PageCharset STRING NOT NULL, - CodeVersion INT NOT NULL, - IsLink SMALLINT NOT NULL, - IsDownload SMALLINT NOT NULL, - IsNotBounce SMALLINT NOT NULL, - FUniqID BIGINT NOT NULL, - OriginalURL STRING NOT NULL, - HID INT NOT NULL, - IsOldCounter SMALLINT NOT NULL, - IsEvent SMALLINT NOT NULL, - IsParameter SMALLINT NOT NULL, - DontCountHits SMALLINT NOT NULL, - WithHash SMALLINT NOT NULL, - HitColor CHAR NOT NULL, - LocalEventTime DateTimev2 NOT NULL, - Age SMALLINT NOT NULL, - Sex SMALLINT NOT NULL, - Income SMALLINT NOT NULL, - Interests SMALLINT NOT NULL, - Robotness SMALLINT NOT NULL, - RemoteIP INT NOT NULL, - WindowName INT NOT NULL, - OpenerName INT NOT NULL, - HistoryLength SMALLINT NOT NULL, - BrowserLanguage STRING NOT NULL, - BrowserCountry STRING NOT NULL, - SocialNetwork STRING NOT NULL, - SocialAction STRING NOT NULL, - HTTPError SMALLINT NOT NULL, - SendTiming INT NOT NULL, - DNSTiming INT NOT NULL, - ConnectTiming INT NOT NULL, - ResponseStartTiming INT NOT NULL, - ResponseEndTiming INT NOT NULL, - FetchTiming INT NOT NULL, - SocialSourceNetworkID SMALLINT NOT NULL, - SocialSourcePage STRING NOT NULL, - ParamPrice BIGINT NOT NULL, - ParamOrderID STRING NOT NULL, - ParamCurrency STRING NOT NULL, - ParamCurrencyID SMALLINT NOT NULL, - OpenstatServiceName STRING NOT NULL, - OpenstatCampaignID STRING NOT NULL, - OpenstatAdID STRING NOT NULL, - OpenstatSourceID STRING NOT NULL, - UTMSource STRING NOT NULL, - UTMMedium STRING NOT NULL, - UTMCampaign STRING NOT NULL, - UTMContent STRING NOT NULL, - UTMTerm STRING NOT NULL, - FromTag STRING NOT NULL, - HasGCLID SMALLINT NOT NULL, - RefererHash BIGINT NOT NULL, - URLHash BIGINT NOT NULL, - CLID INT NOT NULL - ) - DUPLICATE KEY (CounterID, EventDate, UserID, EventTime, WatchID) - DISTRIBUTED BY HASH(UserID) BUCKETS 48 - PROPERTIES ( "replication_num"="1"); - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query1.groovy 
b/regression-test/suites/new_shapes_p0/clickbench/query1.groovy deleted file mode 100644 index e1294c1e2563fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query1.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query1") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(*) FROM hits""" - qt_ckbench_shape_1 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query10.groovy b/regression-test/suites/new_shapes_p0/clickbench/query10.groovy deleted file mode 100644 index 027b06ed7919f5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query10.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT RegionID, SUM(AdvEngineID), COUNT(*) AS c, AVG(ResolutionWidth), COUNT(DISTINCT UserID) FROM hits GROUP BY RegionID ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_10 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query11.groovy b/regression-test/suites/new_shapes_p0/clickbench/query11.groovy deleted file mode 100644 index c87be65cb43346..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query11.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT MobilePhoneModel, COUNT(DISTINCT UserID) AS u FROM hits WHERE MobilePhoneModel <> '' GROUP BY MobilePhoneModel ORDER BY u DESC LIMIT 10""" - qt_ckbench_shape_11 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query12.groovy b/regression-test/suites/new_shapes_p0/clickbench/query12.groovy deleted file mode 100644 index e58d4d3ff0a4dd..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query12.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT MobilePhone, MobilePhoneModel, COUNT(DISTINCT UserID) AS u FROM hits WHERE MobilePhoneModel <> '' GROUP BY MobilePhone, MobilePhoneModel ORDER BY u DESC LIMIT 10""" - qt_ckbench_shape_12 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query13.groovy b/regression-test/suites/new_shapes_p0/clickbench/query13.groovy deleted file mode 100644 index e156e51bc4cc6d..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query13.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, COUNT(*) AS c FROM hits WHERE SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_13 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query14.groovy b/regression-test/suites/new_shapes_p0/clickbench/query14.groovy deleted file mode 100644 index 2bb1aa260fe958..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query14.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query14") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, COUNT(DISTINCT UserID) AS u FROM hits WHERE SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY u DESC LIMIT 10""" - qt_ckbench_shape_14 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query15.groovy b/regression-test/suites/new_shapes_p0/clickbench/query15.groovy deleted file mode 100644 index f8becb72a625d4..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query15.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchEngineID, SearchPhrase, COUNT(*) AS c FROM hits WHERE SearchPhrase <> '' GROUP BY SearchEngineID, SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_15 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query16.groovy b/regression-test/suites/new_shapes_p0/clickbench/query16.groovy deleted file mode 100644 index 4d742899e6b704..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query16.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query16") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, COUNT(*) FROM hits GROUP BY UserID ORDER BY COUNT(*) DESC LIMIT 10""" - qt_ckbench_shape_16 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query17.groovy b/regression-test/suites/new_shapes_p0/clickbench/query17.groovy deleted file mode 100644 index 76307e1167b8ec..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query17.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, SearchPhrase ORDER BY COUNT(*) DESC LIMIT 10""" - qt_ckbench_shape_17 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query18.groovy b/regression-test/suites/new_shapes_p0/clickbench/query18.groovy deleted file mode 100644 index 8a9d8f19b619da..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query18.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, SearchPhrase LIMIT 10""" - qt_ckbench_shape_18 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query19.groovy b/regression-test/suites/new_shapes_p0/clickbench/query19.groovy deleted file mode 100644 index da080ef696aab9..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query19.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, extract(minute FROM EventTime) AS m, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, m, SearchPhrase ORDER BY COUNT(*) DESC LIMIT 10""" - qt_ckbench_shape_19 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query2.groovy b/regression-test/suites/new_shapes_p0/clickbench/query2.groovy deleted file mode 100644 index 0deae902dbd810..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query2.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query2") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """ - SELECT COUNT(*) FROM hits WHERE AdvEngineID <> 0 - """ - qt_ckbench_shape_2 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query20.groovy b/regression-test/suites/new_shapes_p0/clickbench/query20.groovy deleted file mode 100644 index 2d7fc829508e6e..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query20.groovy +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query20") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID FROM hits WHERE UserID = 435090932899640449""" - qt_ckbench_shape_20 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query21.groovy b/regression-test/suites/new_shapes_p0/clickbench/query21.groovy deleted file mode 100644 index 870316ee6ea8c6..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query21.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(*) FROM hits WHERE URL LIKE '%google%'""" - qt_ckbench_shape_21 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query22.groovy b/regression-test/suites/new_shapes_p0/clickbench/query22.groovy deleted file mode 100644 index c4e27d56c2b817..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query22.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, MIN(URL), COUNT(*) AS c FROM hits WHERE URL LIKE '%google%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_22 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query23.groovy b/regression-test/suites/new_shapes_p0/clickbench/query23.groovy deleted file mode 100644 index ff19e0ec4ea667..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query23.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query23") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, MIN(URL), MIN(Title), COUNT(*) AS c, COUNT(DISTINCT UserID) FROM hits WHERE Title LIKE '%Google%' AND URL NOT LIKE '%.google.%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_23 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query24.groovy b/regression-test/suites/new_shapes_p0/clickbench/query24.groovy deleted file mode 100644 index 7ea248701a9b6b..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query24.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT * FROM hits WHERE URL LIKE '%google%' ORDER BY EventTime LIMIT 10""" - qt_ckbench_shape_24 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query25.groovy b/regression-test/suites/new_shapes_p0/clickbench/query25.groovy deleted file mode 100644 index d61c7475d137c5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query25.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query25") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY EventTime LIMIT 10""" - qt_ckbench_shape_25 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query26.groovy b/regression-test/suites/new_shapes_p0/clickbench/query26.groovy deleted file mode 100644 index b5cf08c9482011..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query26.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY SearchPhrase LIMIT 10""" - qt_ckbench_shape_26 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query27.groovy b/regression-test/suites/new_shapes_p0/clickbench/query27.groovy deleted file mode 100644 index e72528e97db520..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query27.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query27") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY EventTime, SearchPhrase LIMIT 10""" - qt_ckbench_shape_27 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query28.groovy b/regression-test/suites/new_shapes_p0/clickbench/query28.groovy deleted file mode 100644 index 08c2fa6b2505d5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query28.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT CounterID, AVG(length(URL)) AS l, COUNT(*) AS c FROM hits WHERE URL <> '' GROUP BY CounterID HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25""" - qt_ckbench_shape_28 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query29.groovy b/regression-test/suites/new_shapes_p0/clickbench/query29.groovy deleted file mode 100644 index e855ad7ff7bda3..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query29.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT REGEXP_REPLACE(Referer, '^https?://(?:www\\.)?([^/]+)/.*\$', '\\\\1') AS k, AVG(length(Referer)) AS l, COUNT(*) AS c, MIN(Referer) FROM hits WHERE Referer <> '' GROUP BY k HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25""" - qt_ckbench_shape_29 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query3.groovy b/regression-test/suites/new_shapes_p0/clickbench/query3.groovy deleted file mode 100644 index ac376f98ff2d2a..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query3.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query3") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SUM(AdvEngineID), COUNT(*), AVG(ResolutionWidth) FROM hits""" - qt_ckbench_shape_3 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query30.groovy b/regression-test/suites/new_shapes_p0/clickbench/query30.groovy deleted file mode 100644 index 1a9b9f914d4147..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query30.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SUM(ResolutionWidth), SUM(ResolutionWidth + 1), SUM(ResolutionWidth + 2), SUM(ResolutionWidth + 3), SUM(ResolutionWidth + 4), SUM(ResolutionWidth + 5), SUM(ResolutionWidth + 6), SUM(ResolutionWidth + 7), SUM(ResolutionWidth + 8), SUM(ResolutionWidth + 9), SUM(ResolutionWidth + 10), SUM(ResolutionWidth + 11), SUM(ResolutionWidth + 12), SUM(ResolutionWidth + 13), SUM(ResolutionWidth + 14), SUM(ResolutionWidth + 15), SUM(ResolutionWidth + 16), SUM(ResolutionWidth + 17), SUM(ResolutionWidth + 18), SUM(ResolutionWidth + 19), SUM(ResolutionWidth + 20), SUM(ResolutionWidth + 21), SUM(ResolutionWidth + 22), SUM(ResolutionWidth + 23), SUM(ResolutionWidth + 24), SUM(ResolutionWidth + 25), SUM(ResolutionWidth + 26), SUM(ResolutionWidth + 27), SUM(ResolutionWidth + 28), SUM(ResolutionWidth + 29), SUM(ResolutionWidth + 30), SUM(ResolutionWidth + 31), SUM(ResolutionWidth + 32), SUM(ResolutionWidth + 33), SUM(ResolutionWidth + 34), SUM(ResolutionWidth + 35), SUM(ResolutionWidth + 36), SUM(ResolutionWidth + 37), SUM(ResolutionWidth + 38), SUM(ResolutionWidth + 39), SUM(ResolutionWidth + 40), SUM(ResolutionWidth + 41), SUM(ResolutionWidth + 42), SUM(ResolutionWidth + 43), SUM(ResolutionWidth + 44), SUM(ResolutionWidth + 45), SUM(ResolutionWidth + 46), SUM(ResolutionWidth + 47), SUM(ResolutionWidth + 48), SUM(ResolutionWidth + 49), SUM(ResolutionWidth + 50), SUM(ResolutionWidth + 51), SUM(ResolutionWidth + 52), SUM(ResolutionWidth + 53), SUM(ResolutionWidth + 54), SUM(ResolutionWidth + 55), SUM(ResolutionWidth + 56), SUM(ResolutionWidth + 57), SUM(ResolutionWidth + 58), SUM(ResolutionWidth + 59), SUM(ResolutionWidth + 60), SUM(ResolutionWidth + 61), 
SUM(ResolutionWidth + 62), SUM(ResolutionWidth + 63), SUM(ResolutionWidth + 64), SUM(ResolutionWidth + 65), SUM(ResolutionWidth + 66), SUM(ResolutionWidth + 67), SUM(ResolutionWidth + 68), SUM(ResolutionWidth + 69), SUM(ResolutionWidth + 70), SUM(ResolutionWidth + 71), SUM(ResolutionWidth + 72), SUM(ResolutionWidth + 73), SUM(ResolutionWidth + 74), SUM(ResolutionWidth + 75), SUM(ResolutionWidth + 76), SUM(ResolutionWidth + 77), SUM(ResolutionWidth + 78), SUM(ResolutionWidth + 79), SUM(ResolutionWidth + 80), SUM(ResolutionWidth + 81), SUM(ResolutionWidth + 82), SUM(ResolutionWidth + 83), SUM(ResolutionWidth + 84), SUM(ResolutionWidth + 85), SUM(ResolutionWidth + 86), SUM(ResolutionWidth + 87), SUM(ResolutionWidth + 88), SUM(ResolutionWidth + 89) FROM hits""" - qt_ckbench_shape_30 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query31.groovy b/regression-test/suites/new_shapes_p0/clickbench/query31.groovy deleted file mode 100644 index 4e0dab037fb90e..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query31.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchEngineID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits WHERE SearchPhrase <> '' GROUP BY SearchEngineID, ClientIP ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_31 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query32.groovy b/regression-test/suites/new_shapes_p0/clickbench/query32.groovy deleted file mode 100644 index a4fc0f66d2d59c..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query32.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT WatchID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits WHERE SearchPhrase <> '' GROUP BY WatchID, ClientIP ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_32 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query33.groovy b/regression-test/suites/new_shapes_p0/clickbench/query33.groovy deleted file mode 100644 index 31d89c03410dcb..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query33.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query33") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT WatchID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits GROUP BY WatchID, ClientIP ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_33 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query34.groovy b/regression-test/suites/new_shapes_p0/clickbench/query34.groovy deleted file mode 100644 index 144dbd8579ef35..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query34.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query34") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URL, COUNT(*) AS c FROM hits GROUP BY URL ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_34 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query35.groovy b/regression-test/suites/new_shapes_p0/clickbench/query35.groovy deleted file mode 100644 index 14b91739e89a5a..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query35.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT 1, URL, COUNT(*) AS c FROM hits GROUP BY 1, URL ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_35 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query36.groovy b/regression-test/suites/new_shapes_p0/clickbench/query36.groovy deleted file mode 100644 index 4067c45ff16b75..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query36.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT ClientIP, ClientIP - 1, ClientIP - 2, ClientIP - 3, COUNT(*) AS c FROM hits GROUP BY ClientIP, ClientIP - 1, ClientIP - 2, ClientIP - 3 ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_36 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query37.groovy b/regression-test/suites/new_shapes_p0/clickbench/query37.groovy deleted file mode 100644 index c1b3882e96eb00..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query37.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query37") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URL, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND DontCountHits = 0 AND IsRefresh = 0 AND URL <> '' GROUP BY URL ORDER BY PageViews DESC LIMIT 10""" - qt_ckbench_shape_37 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query38.groovy b/regression-test/suites/new_shapes_p0/clickbench/query38.groovy deleted file mode 100644 index da44d88d84595a..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query38.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT Title, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND DontCountHits = 0 AND IsRefresh = 0 AND Title <> '' GROUP BY Title ORDER BY PageViews DESC LIMIT 10""" - qt_ckbench_shape_38 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query39.groovy b/regression-test/suites/new_shapes_p0/clickbench/query39.groovy deleted file mode 100644 index 9aaf41f0303988..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query39.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query39") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URL, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND IsLink <> 0 AND IsDownload = 0 GROUP BY URL ORDER BY PageViews DESC LIMIT 10 OFFSET 1000""" - qt_ckbench_shape_39 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query4.groovy b/regression-test/suites/new_shapes_p0/clickbench/query4.groovy deleted file mode 100644 index 5f2727592daee5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query4.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT AVG(UserID) FROM hits""" - qt_ckbench_shape_4 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query40.groovy b/regression-test/suites/new_shapes_p0/clickbench/query40.groovy deleted file mode 100644 index a256bd9d40dbea..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query40.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT TraficSourceID, SearchEngineID, AdvEngineID, CASE WHEN (SearchEngineID = 0 AND AdvEngineID = 0) THEN Referer ELSE '' END AS Src, URL AS Dst, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 GROUP BY TraficSourceID, SearchEngineID, AdvEngineID, Src, Dst ORDER BY PageViews DESC LIMIT 10 OFFSET 1000""" - qt_ckbench_shape_40 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query41.groovy b/regression-test/suites/new_shapes_p0/clickbench/query41.groovy deleted file mode 100644 index e0617d1e5d6a32..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query41.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query41") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URLHash, EventDate, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND TraficSourceID IN (-1, 6) AND RefererHash = 3594120000172545465 GROUP BY URLHash, EventDate ORDER BY PageViews DESC LIMIT 10 OFFSET 100""" - qt_ckbench_shape_41 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query42.groovy b/regression-test/suites/new_shapes_p0/clickbench/query42.groovy deleted file mode 100644 index cd53a2a7d4d030..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query42.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query42") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT WindowClientWidth, WindowClientHeight, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND DontCountHits = 0 AND URLHash = 2868770270353813622 GROUP BY WindowClientWidth, WindowClientHeight ORDER BY PageViews DESC LIMIT 10 OFFSET 10000""" - qt_ckbench_shape_42 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query43.groovy b/regression-test/suites/new_shapes_p0/clickbench/query43.groovy deleted file mode 100644 index 8de968093369a9..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query43.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query43") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT DATE_FORMAT(EventTime, '%Y-%m-%d %H:%i:00') AS M, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-14' AND EventDate <= '2013-07-15' AND IsRefresh = 0 AND DontCountHits = 0 GROUP BY DATE_FORMAT(EventTime, '%Y-%m-%d %H:%i:00') ORDER BY DATE_FORMAT(EventTime, '%Y-%m-%d %H:%i:00') LIMIT 10 OFFSET 1000""" - qt_ckbench_shape_43 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query5.groovy b/regression-test/suites/new_shapes_p0/clickbench/query5.groovy deleted file mode 100644 index cf727f94101dee..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query5.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(DISTINCT UserID) FROM hits""" - qt_ckbench_shape_5 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query6.groovy b/regression-test/suites/new_shapes_p0/clickbench/query6.groovy deleted file mode 100644 index 3ec9bbbe25b2d3..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query6.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(DISTINCT SearchPhrase) FROM hits""" - qt_ckbench_shape_6 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query7.groovy b/regression-test/suites/new_shapes_p0/clickbench/query7.groovy deleted file mode 100644 index 1c9d80a33709f8..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query7.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT MIN(EventDate), MAX(EventDate) FROM hits""" - qt_ckbench_shape_7 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query8.groovy b/regression-test/suites/new_shapes_p0/clickbench/query8.groovy deleted file mode 100644 index 36c1322ff5a7ef..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query8.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT AdvEngineID, COUNT(*) FROM hits WHERE AdvEngineID <> 0 GROUP BY AdvEngineID ORDER BY COUNT(*) DESC""" - qt_ckbench_shape_8 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query9.groovy b/regression-test/suites/new_shapes_p0/clickbench/query9.groovy deleted file mode 100644 index 91a8184de49c97..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query9.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT RegionID, COUNT(DISTINCT UserID) AS u FROM hits GROUP BY RegionID ORDER BY u DESC LIMIT 10 -""" - qt_ckbench_shape_9 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py b/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py deleted file mode 100644 index 8317bd1859f261..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. 
-if __name__ == '__main__': - with open('shape.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf1000/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../shape/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl b/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl deleted file mode 100644 index c25fd3f36b03f4..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - def ds = """{query}""" - qt_ds_shape_{--} ''' - explain shape plan - {query} - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy deleted file mode 100644 index b2afbd31d7b5a2..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy +++ /dev/null @@ -1,812 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql 
''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), - cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT 
EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - 
cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF 
NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - 
wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), - p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - 
drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - 
s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - 
sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, 
- c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy deleted file mode 100644 index d43c8dfb357846..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 ''' - explain shape plan - with customer_total_return as -(select -/*+ leading(store_returns broadcast date_dim) */ -sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select - c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and 
ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy deleted file mode 100644 index 01b4beedbdf434..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 ''' - explain shape plan - with ssales as -(select -/*+ leading(store_sales broadcast store shuffle {customer shuffle customer_address} shuffle item shuffle store_returns) */ -c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - 
,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy deleted file mode 100644 index 950e9416d2c47e..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - 
,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 ''' - explain shape plan - with cs_ui as - (select - /*+ leading(catalog_sales shuffle 
catalog_returns) */ - cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select - /*+ leading( {store_sales {{customer d2} cd2}} cd1 d3 item {hd1 ib1} store_returns ad1 hd2 ad2 ib2 d1 store promotion cs_ui) */ - i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - 
hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select -/*+ leading(cs1 shuffle cs2) */ - cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy deleted file mode 100644 index ce258f814d8047..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 ''' - explain shape plan 
- select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select - /*+ leading(store_sales broadcast date_dim broadcast store broadcast item) */ - i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy deleted file mode 100644 index 76efe58c0ad620..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" - 
qt_ds_shape_72 ''' - explain shape plan - select - /*+ leading( inventory shuffle { catalog_returns shuffle {catalog_sales shuffle {d3 broadcast d1} broadcast household_demographics shuffle customer_demographics broadcast promotion shuffle item} broadcast d2} broadcast warehouse) */ - i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy deleted file mode 100644 index a96d26a95caf78..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on 
cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, ss_item_sk, ss_customer_sk - ) -select -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 ''' - explain shape plan - with ws as - (select - /*+ leading(web_sales broadcast date_dim web_returns) */ - d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select - /*+ 
leading(catalog_sales broadcast date_dim catalog_returns) */ - d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select - /*+ leading(store_sales broadcast date_dim store_returns) */ - d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, ss_item_sk, ss_customer_sk - ) -select -/*+ leading(ss shuffle ws shuffle cs) */ -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/load.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/load.groovy deleted file mode 100644 index 
fc89f02d977e9c..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/load.groovy +++ /dev/null @@ -1,226 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - sql """ - drop table if exists lineitem; - """ - sql """ - CREATE TABLE lineitem ( - l_shipdate DATEV2 NOT NULL, - l_orderkey bigint NOT NULL, - l_linenumber int not null, - l_partkey int NOT NULL, - l_suppkey int not null, - l_quantity decimal(15, 2) NOT NULL, - l_extendedprice decimal(15, 2) NOT NULL, - l_discount decimal(15, 2) NOT NULL, - l_tax decimal(15, 2) NOT NULL, - l_returnflag VARCHAR(1) NOT NULL, - l_linestatus VARCHAR(1) NOT NULL, - l_commitdate DATEV2 NOT NULL, - l_receiptdate DATEV2 NOT NULL, - l_shipinstruct VARCHAR(25) NOT NULL, - l_shipmode VARCHAR(10) NOT NULL, - l_comment VARCHAR(44) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`l_shipdate`, `l_orderkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); - """ 
- - sql """ - drop table if exists orders; - """ - - sql ''' - CREATE TABLE orders ( - o_orderkey bigint NOT NULL, - o_orderdate DATEV2 NOT NULL, - o_custkey int NOT NULL, - o_orderstatus VARCHAR(1) NOT NULL, - o_totalprice decimal(15, 2) NOT NULL, - o_orderpriority VARCHAR(15) NOT NULL, - o_clerk VARCHAR(15) NOT NULL, - o_shippriority int NOT NULL, - o_comment VARCHAR(79) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`o_orderkey`, `o_orderdate`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`o_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); ''' - - sql ''' - drop table if exists partsupp; - ''' - - sql ''' - CREATE TABLE partsupp ( - ps_partkey int NOT NULL, - ps_suppkey int NOT NULL, - ps_availqty int NOT NULL, - ps_supplycost decimal(15, 2) NOT NULL, - ps_comment VARCHAR(199) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`ps_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`ps_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists part; - ''' - - sql ''' - CREATE TABLE part ( - p_partkey int NOT NULL, - p_name VARCHAR(55) NOT NULL, - p_mfgr VARCHAR(25) NOT NULL, - p_brand VARCHAR(10) NOT NULL, - p_type VARCHAR(25) NOT NULL, - p_size int NOT NULL, - p_container VARCHAR(10) NOT NULL, - p_retailprice decimal(15, 2) NOT NULL, - p_comment VARCHAR(23) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`p_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists customer; - ''' - - sql ''' - CREATE TABLE customer ( - c_custkey int NOT NULL, - c_name VARCHAR(25) NOT NULL, - c_address VARCHAR(40) NOT NULL, - c_nationkey int NOT NULL, - c_phone VARCHAR(15) NOT NULL, - c_acctbal decimal(15, 2) NOT NULL, - c_mktsegment VARCHAR(10) NOT NULL, - c_comment VARCHAR(117) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`c_custkey`) - 
COMMENT "OLAP" - DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists supplier - ''' - - sql ''' - CREATE TABLE supplier ( - s_suppkey int NOT NULL, - s_name VARCHAR(25) NOT NULL, - s_address VARCHAR(40) NOT NULL, - s_nationkey int NOT NULL, - s_phone VARCHAR(15) NOT NULL, - s_acctbal decimal(15, 2) NOT NULL, - s_comment VARCHAR(101) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`s_suppkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists nation; - ''' - - sql ''' - CREATE TABLE `nation` ( - `n_nationkey` int(11) NOT NULL, - `n_name` varchar(25) NOT NULL, - `n_regionkey` int(11) NOT NULL, - `n_comment` varchar(152) NULL - ) ENGINE=OLAP - DUPLICATE KEY(`N_NATIONKEY`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`N_NATIONKEY`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists region; - ''' - - sql ''' - CREATE TABLE region ( - r_regionkey int NOT NULL, - r_name VARCHAR(25) NOT NULL, - r_comment VARCHAR(152) - )ENGINE=OLAP - DUPLICATE KEY(`r_regionkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`r_regionkey`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop view if exists revenue0; - ''' - - sql ''' - create view revenue0 (supplier_no, total_revenue) as - select - l_suppkey, - sum(l_extendedprice * (1 - l_discount)) - from - lineitem - where - l_shipdate >= date '1996-01-01' - and l_shipdate < date '1996-01-01' + interval '3' month - group by - l_suppkey; - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy deleted file mode 100644 index 9322e2485a0221..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under 
one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem shuffle {{customer shuffle orders} broadcast nation}) */ - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy deleted file mode 100644 index 1581c654ecdc73..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - - - qt_select """ - explain shape plan - select - /*+ leading(partsupp {supplier nation}) */ - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - /*+ leading(partsupp {supplier nation}) */ - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy deleted file mode 100644 index c55966a2bfb546..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(orders lineitem) */ - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy deleted file mode 100644 index 54a83a15265010..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation 
(ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(orders shuffle customer) */ - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy deleted file mode 100644 index d1a05921a7ddd7..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy +++ 
/dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(part lineitem) */ - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy deleted file mode 100644 index 70bc802e3220a7..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - qt_select """ - explain shape plan - select - /*+ leading(supplier revenue0) */ - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy 
deleted file mode 100644 index aa595d59bce9ed..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem broadcast part) */ - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy deleted file mode 100644 index ce166235d63322..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem broadcast part) */ - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy deleted file mode 100644 index 543193d069821d..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set runtime_filter_mode=OFF' - - - sql 'set be_number_for_test=3' - - - qt_select """ - explain shape plan - select - /*+ leading(lineitem {orders shuffle customer}) */ - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy deleted file mode 100644 index 
fd004fe6981d4a..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set runtime_filter_mode=OFF' - - - - -sql 'set be_number_for_test=3' - - - qt_select """ - explain shape plan - select - /*+ leading(lineitem orders) */ - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy deleted file mode 100644 index 0e1527e34a44ad..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem orders broadcast {supplier broadcast {nation broadcast region}} shuffle customer) */ - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy deleted file mode 100644 index 0f33f61c395502..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading( lineitem broadcast {supplier broadcast n1} {orders shuffle {customer broadcast n2}}) */ - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy 
b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy deleted file mode 100644 index e70333dd1b051b..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - qt_select """ - explain shape plan - select - /*+ leading( supplier { orders {lineitem broadcast part} {customer broadcast {n1 broadcast region}}} broadcast n2) */ - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy deleted file mode 100644 index d1fd50b47c56b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - /*+ leading(orders shuffle {lineitem shuffle part} shuffle {supplier broadcast nation} shuffle partsupp) */ - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy deleted file mode 100644 index 2544b972878259..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy +++ /dev/null @@ -1,218 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - sql """ -CREATE TABLE IF NOT EXISTS `lineorder` ( - `lo_orderkey` int(11) NOT NULL COMMENT '', - `lo_linenumber` int(11) NOT NULL COMMENT '', - `lo_custkey` int(11) NOT NULL COMMENT '', - `lo_partkey` int(11) NOT NULL COMMENT '', - `lo_suppkey` int(11) NOT NULL COMMENT '', - `lo_orderdate` int(11) NOT NULL COMMENT '', - `lo_orderpriority` varchar(16) NOT NULL COMMENT '', - `lo_shippriority` int(11) NOT NULL COMMENT '', - `lo_quantity` int(11) NOT NULL COMMENT '', - `lo_extendedprice` int(11) NOT NULL COMMENT '', - `lo_ordtotalprice` int(11) NOT NULL COMMENT '', - `lo_discount` int(11) NOT NULL COMMENT '', - `lo_revenue` int(11) NOT NULL COMMENT '', - `lo_supplycost` int(11) NOT NULL COMMENT '', - `lo_tax` int(11) NOT NULL COMMENT '', - `lo_commitdate` int(11) NOT NULL COMMENT '', - `lo_shipmode` varchar(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`lo_orderkey`) -COMMENT "OLAP" -PARTITION BY RANGE(`lo_orderdate`) -(PARTITION p1 VALUES [("-2147483648"), ("19930101")), -PARTITION p2 VALUES [("19930101"), ("19940101")), -PARTITION p3 VALUES [("19940101"), ("19950101")), -PARTITION p4 VALUES [("19950101"), ("19960101")), -PARTITION p5 VALUES [("19960101"), ("19970101")), -PARTITION p6 VALUES [("19970101"), ("19980101")), -PARTITION p7 VALUES [("19980101"), ("19990101"))) -DISTRIBUTED BY HASH(`lo_orderkey`) BUCKETS 48 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa1", -"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """ -CREATE TABLE IF NOT EXISTS `customer` ( - `c_custkey` int(11) NOT NULL COMMENT '', - `c_name` varchar(26) NOT NULL COMMENT '', - `c_address` varchar(41) NOT NULL COMMENT '', - `c_city` varchar(11) NOT NULL COMMENT '', - `c_nation` varchar(16) NOT NULL COMMENT '', - `c_region` 
varchar(13) NOT NULL COMMENT '', - `c_phone` varchar(16) NOT NULL COMMENT '', - `c_mktsegment` varchar(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`c_custkey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa2", -"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """ -CREATE TABLE IF NOT EXISTS `dates` ( - `d_datekey` int(11) NOT NULL COMMENT '', - `d_date` varchar(20) NOT NULL COMMENT '', - `d_dayofweek` varchar(10) NOT NULL COMMENT '', - `d_month` varchar(11) NOT NULL COMMENT '', - `d_year` int(11) NOT NULL COMMENT '', - `d_yearmonthnum` int(11) NOT NULL COMMENT '', - `d_yearmonth` varchar(9) NOT NULL COMMENT '', - `d_daynuminweek` int(11) NOT NULL COMMENT '', - `d_daynuminmonth` int(11) NOT NULL COMMENT '', - `d_daynuminyear` int(11) NOT NULL COMMENT '', - `d_monthnuminyear` int(11) NOT NULL COMMENT '', - `d_weeknuminyear` int(11) NOT NULL COMMENT '', - `d_sellingseason` varchar(14) NOT NULL COMMENT '', - `d_lastdayinweekfl` int(11) NOT NULL COMMENT '', - `d_lastdayinmonthfl` int(11) NOT NULL COMMENT '', - `d_holidayfl` int(11) NOT NULL COMMENT '', - `d_weekdayfl` int(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`d_datekey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`d_datekey`) BUCKETS 1 -PROPERTIES ( -"replication_num" = "1", -"in_memory" = "false", -"colocate_with" = "groupa3", -"storage_format" = "DEFAULT" -);""" - -sql """ - - CREATE TABLE IF NOT EXISTS `supplier` ( - `s_suppkey` int(11) NOT NULL COMMENT '', - `s_name` varchar(26) NOT NULL COMMENT '', - `s_address` varchar(26) NOT NULL COMMENT '', - `s_city` varchar(11) NOT NULL COMMENT '', - `s_nation` varchar(16) NOT NULL COMMENT '', - `s_region` varchar(13) NOT NULL COMMENT '', - `s_phone` varchar(16) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`s_suppkey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 12 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa4", 
-"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """ -CREATE TABLE IF NOT EXISTS `part` ( - `p_partkey` int(11) NOT NULL COMMENT '', - `p_name` varchar(23) NOT NULL COMMENT '', - `p_mfgr` varchar(7) NOT NULL COMMENT '', - `p_category` varchar(8) NOT NULL COMMENT '', - `p_brand` varchar(10) NOT NULL COMMENT '', - `p_color` varchar(12) NOT NULL COMMENT '', - `p_type` varchar(26) NOT NULL COMMENT '', - `p_size` int(11) NOT NULL COMMENT '', - `p_container` varchar(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`p_partkey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 12 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa5", -"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """alter table dates modify column d_lastdayinweekfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table supplier modify column s_suppkey set stats ('row_count'='200000', 'ndv'='196099', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000', 'data_size'='800000');""" -sql """alter table lineorder modify column lo_quantity set stats ('row_count'='600037902', 'ndv'='50', 'num_nulls'='0', 'min_value'='1', 'max_value'='50', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_shipmode set stats ('row_count'='600037902', 'ndv'='7', 'num_nulls'='0', 'min_value'='AIR', 'max_value'='TRUCK', 'data_size'='2571562204');""" -sql """alter table customer modify column c_name set stats ('row_count'='3000000', 'ndv'='3017713', 'num_nulls'='0', 'min_value'='Customer#000000001', 'max_value'='Customer#003000000', 'data_size'='54000000');""" -sql """alter table dates modify column d_date set stats ('row_count'='2556', 'ndv'='2539', 'num_nulls'='0', 'min_value'='April 1, 1992', 'max_value'='September 9, 1998', 'data_size'='38181');""" -sql """alter table dates modify column d_daynuminyear set stats ('row_count'='2556', 'ndv'='366', 
'num_nulls'='0', 'min_value'='1', 'max_value'='366', 'data_size'='10224');""" -sql """alter table dates modify column d_yearmonth set stats ('row_count'='2556', 'ndv'='84', 'num_nulls'='0', 'min_value'='Apr1992', 'max_value'='Sep1998', 'data_size'='17892');""" -sql """alter table part modify column p_mfgr set stats ('row_count'='1400000', 'ndv'='5', 'num_nulls'='0', 'min_value'='MFGR#1', 'max_value'='MFGR#5', 'data_size'='8400000');""" -sql """alter table part modify column p_name set stats ('row_count'='1400000', 'ndv'='8417', 'num_nulls'='0', 'min_value'='almond antique', 'max_value'='yellow white', 'data_size'='17705366');""" -sql """alter table lineorder modify column lo_extendedprice set stats ('row_count'='600037902', 'ndv'='1135983', 'num_nulls'='0', 'min_value'='90096', 'max_value'='10494950', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_linenumber set stats ('row_count'='600037902', 'ndv'='7', 'num_nulls'='0', 'min_value'='1', 'max_value'='7', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_partkey set stats ('row_count'='600037902', 'ndv'='999528', 'num_nulls'='0', 'min_value'='1', 'max_value'='1000000', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_shippriority set stats ('row_count'='600037902', 'ndv'='1', 'num_nulls'='0', 'min_value'='0', 'max_value'='0', 'data_size'='2400151608');""" -sql """alter table customer modify column c_mktsegment set stats ('row_count'='3000000', 'ndv'='5', 'num_nulls'='0', 'min_value'='AUTOMOBILE', 'max_value'='MACHINERY', 'data_size'='26999329');""" -sql """alter table dates modify column d_dayofweek set stats ('row_count'='2556', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='18258');""" -sql """alter table dates modify column d_sellingseason set stats ('row_count'='2556', 'ndv'='5', 'num_nulls'='0', 'min_value'='Christmas', 'max_value'='Winter', 'data_size'='15760');""" -sql """alter table 
dates modify column d_weekdayfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table supplier modify column s_city set stats ('row_count'='200000', 'ndv'='250', 'num_nulls'='0', 'min_value'='ALGERIA 0', 'max_value'='VIETNAM 9', 'data_size'='2000000');""" -sql """alter table part modify column p_category set stats ('row_count'='1400000', 'ndv'='25', 'num_nulls'='0', 'min_value'='MFGR#11', 'max_value'='MFGR#55', 'data_size'='9800000');""" -sql """alter table part modify column p_size set stats ('row_count'='1400000', 'ndv'='50', 'num_nulls'='0', 'min_value'='1', 'max_value'='50', 'data_size'='5600000');""" -sql """alter table part modify column p_type set stats ('row_count'='1400000', 'ndv'='150', 'num_nulls'='0', 'min_value'='ECONOMY ANODIZED BRASS', 'max_value'='STANDARD POLISHED TIN', 'data_size'='28837497');""" -sql """alter table lineorder modify column lo_orderkey set stats ('row_count'='600037902', 'ndv'='148064528', 'num_nulls'='0', 'min_value'='1', 'max_value'='600000000', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_revenue set stats ('row_count'='600037902', 'ndv'='6280312', 'num_nulls'='0', 'min_value'='81087', 'max_value'='10494950', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_suppkey set stats ('row_count'='600037902', 'ndv'='196099', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_supplycost set stats ('row_count'='600037902', 'ndv'='15824', 'num_nulls'='0', 'min_value'='54057', 'max_value'='125939', 'data_size'='2400151608');""" -sql """alter table customer modify column c_address set stats ('row_count'='3000000', 'ndv'='3011483', 'num_nulls'='0', 'min_value'=' yaP00NZn4mxv', 'max_value'='zzzzsVRceYXRDisV3RC', 'data_size'='44994193');""" -sql """alter table dates modify column d_datekey set stats ('row_count'='2556', 
'ndv'='2560', 'num_nulls'='0', 'min_value'='19920101', 'max_value'='19981230', 'data_size'='10224');""" -sql """alter table dates modify column d_daynuminmonth set stats ('row_count'='2556', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='10224');""" -sql """alter table dates modify column d_year set stats ('row_count'='2556', 'ndv'='7', 'num_nulls'='0', 'min_value'='1992', 'max_value'='1998', 'data_size'='10224');""" -sql """alter table supplier modify column s_address set stats ('row_count'='200000', 'ndv'='197960', 'num_nulls'='0', 'min_value'=' 2MrUy', 'max_value'='zzzqXhTdKxT0RAR8yxbc', 'data_size'='2998285');""" -sql """alter table lineorder modify column lo_commitdate set stats ('row_count'='600037902', 'ndv'='2469', 'num_nulls'='0', 'min_value'='19920131', 'max_value'='19981031', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_tax set stats ('row_count'='600037902', 'ndv'='9', 'num_nulls'='0', 'min_value'='0', 'max_value'='8', 'data_size'='2400151608');""" -sql """alter table customer modify column c_city set stats ('row_count'='3000000', 'ndv'='250', 'num_nulls'='0', 'min_value'='ALGERIA 0', 'max_value'='VIETNAM 9', 'data_size'='30000000');""" -sql """alter table customer modify column c_custkey set stats ('row_count'='3000000', 'ndv'='2985828', 'num_nulls'='0', 'min_value'='1', 'max_value'='3000000', 'data_size'='12000000');""" -sql """alter table dates modify column d_daynuminweek set stats ('row_count'='2556', 'ndv'='7', 'num_nulls'='0', 'min_value'='1', 'max_value'='7', 'data_size'='10224');""" -sql """alter table dates modify column d_lastdayinmonthfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table dates modify column d_month set stats ('row_count'='2556', 'ndv'='12', 'num_nulls'='0', 'min_value'='April', 'max_value'='September', 'data_size'='15933');""" -sql """alter table dates modify column 
d_yearmonthnum set stats ('row_count'='2556', 'ndv'='84', 'num_nulls'='0', 'min_value'='199201', 'max_value'='199812', 'data_size'='10224');""" -sql """alter table supplier modify column s_phone set stats ('row_count'='200000', 'ndv'='199261', 'num_nulls'='0', 'min_value'='10-100-177-2350', 'max_value'='34-999-827-8511', 'data_size'='3000000');""" -sql """alter table part modify column p_partkey set stats ('row_count'='1400000', 'ndv'='1394881', 'num_nulls'='0', 'min_value'='1', 'max_value'='1400000', 'data_size'='5600000');""" -sql """alter table lineorder modify column lo_custkey set stats ('row_count'='600037902', 'ndv'='1962895', 'num_nulls'='0', 'min_value'='1', 'max_value'='2999999', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_orderdate set stats ('row_count'='600037902', 'ndv'='2408', 'num_nulls'='0', 'min_value'='19920101', 'max_value'='19980802', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_ordtotalprice set stats ('row_count'='600037902', 'ndv'='35026888', 'num_nulls'='0', 'min_value'='81806', 'max_value'='60690215', 'data_size'='2400151608');""" -sql """alter table customer modify column c_nation set stats ('row_count'='3000000', 'ndv'='25', 'num_nulls'='0', 'min_value'='ALGERIA', 'max_value'='VIETNAM', 'data_size'='21248112');""" -sql """alter table customer modify column c_phone set stats ('row_count'='3000000', 'ndv'='3012496', 'num_nulls'='0', 'min_value'='10-100-106-1617', 'max_value'='34-999-998-5763', 'data_size'='45000000');""" -sql """alter table customer modify column c_region set stats ('row_count'='3000000', 'ndv'='5', 'num_nulls'='0', 'min_value'='AFRICA', 'max_value'='MIDDLE EAST', 'data_size'='20398797');""" -sql """alter table dates modify column d_holidayfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table dates modify column d_weeknuminyear set stats ('row_count'='2556', 'ndv'='53', 
'num_nulls'='0', 'min_value'='1', 'max_value'='53', 'data_size'='10224');""" -sql """alter table supplier modify column s_nation set stats ('row_count'='200000', 'ndv'='25', 'num_nulls'='0', 'min_value'='ALGERIA', 'max_value'='VIETNAM', 'data_size'='1415335');""" -sql """alter table part modify column p_brand set stats ('row_count'='1400000', 'ndv'='1002', 'num_nulls'='0', 'min_value'='MFGR#111', 'max_value'='MFGR#559', 'data_size'='12285135');""" -sql """alter table part modify column p_color set stats ('row_count'='1400000', 'ndv'='92', 'num_nulls'='0', 'min_value'='almond', 'max_value'='yellow', 'data_size'='8170588');""" -sql """alter table part modify column p_container set stats ('row_count'='1400000', 'ndv'='40', 'num_nulls'='0', 'min_value'='JUMBO BAG', 'max_value'='WRAP PKG', 'data_size'='10606696');""" -sql """alter table lineorder modify column lo_discount set stats ('row_count'='600037902', 'ndv'='11', 'num_nulls'='0', 'min_value'='0', 'max_value'='10', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_orderpriority set stats ('row_count'='600037902', 'ndv'='5', 'num_nulls'='0', 'min_value'='1-URGENT', 'max_value'='5-LOW', 'data_size'='5040804567');""" -sql """alter table dates modify column d_monthnuminyear set stats ('row_count'='2556', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='10224');""" -sql """alter table supplier modify column s_name set stats ('row_count'='200000', 'ndv'='201596', 'num_nulls'='0', 'min_value'='Supplier#000000001', 'max_value'='Supplier#000200000', 'data_size'='3600000');""" -sql """alter table supplier modify column s_region set stats ('row_count'='200000', 'ndv'='5', 'num_nulls'='0', 'min_value'='AFRICA', 'max_value'='MIDDLE EAST', 'data_size'='1360337');""" - - -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy deleted file mode 100644 index 
1ba75597477e43..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q1.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql "set enable_parallel_result_sink=false;" - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - LO_ORDERDATE, - LO_ORDERKEY, - LO_LINENUMBER, - LO_CUSTKEY, - LO_PARTKEY, - LO_SUPPKEY, - LO_ORDERPRIORITY, - LO_SHIPPRIORITY, - LO_QUANTITY, - LO_EXTENDEDPRICE, - LO_ORDTOTALPRICE, - LO_DISCOUNT, - LO_REVENUE, - LO_SUPPLYCOST, - LO_TAX, - LO_COMMITDATE, - LO_SHIPMODE, - C_NAME, - C_ADDRESS, - C_CITY, - C_NATION, - C_REGION, - C_PHONE, - C_MKTSEGMENT, - S_NAME, - 
S_ADDRESS, - S_CITY, - S_NATION, - S_REGION, - S_PHONE, - P_NAME, - P_MFGR, - P_CATEGORY, - P_BRAND, - P_COLOR, - P_TYPE, - P_SIZE, - P_CONTAINER - FROM ( - SELECT - lo_orderkey, - lo_linenumber, - lo_custkey, - lo_partkey, - lo_suppkey, - lo_orderdate, - lo_orderpriority, - lo_shippriority, - lo_quantity, - lo_extendedprice, - lo_ordtotalprice, - lo_discount, - lo_revenue, - lo_supplycost, - lo_tax, - lo_commitdate, - lo_shipmode - FROM lineorder - ) l - INNER JOIN customer c - ON (c.c_custkey = l.lo_custkey) - INNER JOIN supplier s - ON (s.s_suppkey = l.lo_suppkey) - INNER JOIN part p - ON (p.p_partkey = l.lo_partkey); - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy deleted file mode 100644 index 42b34b901e9c0e..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT SUM(lo_extendedprice * lo_discount) AS REVENUE -FROM lineorder, dates -WHERE - lo_orderdate = d_datekey - AND d_year = 1993 - AND lo_discount BETWEEN 1 AND 3 - AND lo_quantity < 25; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy deleted file mode 100644 index 5ce77e665c070d..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT SUM(lo_extendedprice * lo_discount) AS REVENUE -FROM lineorder, dates -WHERE - lo_orderdate = d_datekey - AND d_yearmonth = 'Jan1994' - AND lo_discount BETWEEN 4 AND 6 - AND lo_quantity BETWEEN 26 AND 35; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy deleted file mode 100644 index 61155a26f647af..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - SUM(lo_extendedprice * lo_discount) AS REVENUE -FROM lineorder, dates -WHERE - lo_orderdate = d_datekey - AND d_weeknuminyear = 6 - AND d_year = 1994 - AND lo_discount BETWEEN 5 AND 7 - AND lo_quantity BETWEEN 26 AND 35; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy deleted file mode 100644 index 8d3232ec783355..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT SUM(lo_revenue), d_year, p_brand -FROM lineorder, dates, part, supplier -WHERE - lo_orderdate = d_datekey - AND lo_partkey = p_partkey - AND lo_suppkey = s_suppkey - AND p_category = 'MFGR#12' - AND s_region = 'AMERICA' -GROUP BY d_year, p_brand -ORDER BY p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy deleted file mode 100644 index dd614fecc779b5..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q2.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan -SELECT SUM(lo_revenue), d_year, p_brand -FROM lineorder, dates, part, supplier -WHERE - lo_orderdate = d_datekey - AND lo_partkey = p_partkey - AND lo_suppkey = s_suppkey - AND p_brand BETWEEN 'MFGR#2221' AND 'MFGR#2228' - AND s_region = 'ASIA' -GROUP BY d_year, p_brand -ORDER BY d_year, p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy deleted file mode 100644 index cf39d3d1047e8d..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q2.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan -SELECT SUM(lo_revenue), d_year, p_brand -FROM lineorder, dates, part, supplier -WHERE - lo_orderdate = d_datekey - AND lo_partkey = p_partkey - AND lo_suppkey = s_suppkey - AND p_brand = 'MFGR#2239' - AND s_region = 'EUROPE' -GROUP BY d_year, p_brand -ORDER BY d_year, p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy deleted file mode 100644 index a5a7eadb3735e2..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_nation, - s_nation, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND c_region = 'ASIA' - AND s_region = 'ASIA' - AND d_year >= 1992 - AND d_year <= 1997 -GROUP BY c_nation, s_nation, d_year -ORDER BY d_year ASC, REVENUE DESC; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy deleted file mode 100644 index 4755927b522171..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor 
license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_city, - s_city, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND c_nation = 'UNITED STATES' - AND s_nation = 'UNITED STATES' - AND d_year >= 1992 - AND d_year <= 1997 -GROUP BY c_city, s_city, d_year -ORDER BY d_year ASC, REVENUE DESC; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy deleted file mode 100644 index 7f349a8f0841fc..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_city, - s_city, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND ( - c_city = 'UNITED KI1' - OR c_city = 'UNITED KI5' - ) - AND ( - s_city = 'UNITED KI1' - OR s_city = 'UNITED KI5' - ) - AND d_year >= 1992 - AND d_year <= 1997 -GROUP BY c_city, s_city, d_year -ORDER BY d_year ASC, REVENUE DESC; -""" -} diff --git 
a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy deleted file mode 100644 index ff34697a8521aa..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q3.4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_city, - s_city, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND ( - c_city = 'UNITED KI1' - OR c_city = 'UNITED KI5' - ) - AND ( - s_city = 'UNITED KI1' - OR s_city = 'UNITED KI5' - ) - AND d_yearmonth = 'Dec1997' -GROUP BY c_city, s_city, d_year -ORDER BY d_year ASC, REVENUE DESC; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy deleted file mode 100644 index 91d4bf499a1c39..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - d_year, - c_nation, - SUM(lo_revenue - lo_supplycost) AS PROFIT -FROM dates, customer, supplier, part, lineorder -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_partkey = p_partkey - AND lo_orderdate = d_datekey - AND c_region = 'AMERICA' - AND s_region = 'AMERICA' - AND ( - p_mfgr = 'MFGR#1' - OR p_mfgr = 'MFGR#2' - ) -GROUP BY d_year, c_nation -ORDER BY d_year, c_nation; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy deleted file mode 100644 index da8b425a810a4b..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - d_year, - s_nation, - p_category, - SUM(lo_revenue - lo_supplycost) AS PROFIT -FROM dates, customer, supplier, part, lineorder -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_partkey = p_partkey - AND lo_orderdate = d_datekey - AND c_region = 'AMERICA' - AND s_region = 'AMERICA' - AND ( - d_year = 1997 - OR d_year = 1998 - ) - AND ( - p_mfgr = 'MFGR#1' - OR p_mfgr = 'MFGR#2' - ) -GROUP BY d_year, s_nation, p_category -ORDER BY d_year, s_nation, p_category; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy deleted file mode 100644 index bb3f5de73837bd..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy +++ 
/dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - d_year, - s_city, - p_brand, - SUM(lo_revenue - lo_supplycost) AS PROFIT -FROM dates, customer, supplier, part, lineorder -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_partkey = p_partkey - AND lo_orderdate = d_datekey - AND s_nation = 'UNITED STATES' - AND ( - d_year = 1997 - OR d_year = 1998 - ) - AND p_category = 'MFGR#14' -GROUP BY d_year, s_city, p_brand -ORDER BY d_year, s_city, p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy deleted file mode 100644 index 1ed3ebba10e9f8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy +++ /dev/null @@ -1,2552 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql 
''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), - cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT 
EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - 
cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF 
NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - 
wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), - p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - 
drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - 
s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - 
sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, 
- c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - alter table customer add constraint customer_pk primary key (c_customer_sk); - ''' - - sql ''' - alter table customer add constraint customer_uk unique (c_customer_id); - ''' - - sql ''' - alter table store_sales add constraint ss_fk foreign key(ss_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table web_sales add constraint ws_fk foreign key(ws_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table catalog_sales add constraint cs_fk foreign key(cs_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table item add constraint i_item_sk_pk primary key (i_item_sk) - ''' - - sql """ - alter table customer_demographics modify column cd_dep_employed_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_day_name set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='521779') - """ - - sql """ - alter table date_dim modify column d_following_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 
'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_same_day_ly set stats ('row_count'='73049', 'ndv'='72450', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2487705', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_city set stats ('row_count'='20', 'ndv'='12', 'num_nulls'='0', 'min_value'='Fairview', 'max_value'='Shiloh', 'data_size'='183') - """ - - sql """ - alter table warehouse modify column w_street_type set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='71') - """ - - sql """ - alter table catalog_sales modify column cs_call_center_sk set stats ('row_count'='1439980416', 'ndv'='42', 'num_nulls'='7199711', 'min_value'='1', 'max_value'='42', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship set stats ('row_count'='1439980416', 'ndv'='2505826', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='43956.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_sales_price set stats ('row_count'='1439980416', 'ndv'='29306', 'num_nulls'='7200276', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_class set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='large', 'max_value'='small', 'data_size'='226') - """ - - sql """ - alter table call_center modify column cc_country set stats ('row_count'='42', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='546') - """ - - sql """ - alter table call_center modify column cc_county set stats ('row_count'='42', 'ndv'='16', 'num_nulls'='0', 'min_value'='Barrow County', 'max_value'='Williamson County', 'data_size'='627') - """ - - sql """ - alter table call_center modify column cc_mkt_class set stats ('row_count'='42', 'ndv'='36', 'num_nulls'='0', 
'min_value'='A bit narrow forms matter animals. Consist', 'max_value'='Yesterday new men can make moreov', 'data_size'='1465') - """ - - sql """ - alter table call_center modify column cc_sq_ft set stats ('row_count'='42', 'ndv'='31', 'num_nulls'='0', 'min_value'='-1890660328', 'max_value'='2122480316', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_state set stats ('row_count'='42', 'ndv'='14', 'num_nulls'='0', 'min_value'='FL', 'max_value'='WV', 'data_size'='84') - """ - - sql """ - alter table inventory modify column inv_warehouse_sk set stats ('row_count'='783000000', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2881609', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cash set stats ('row_count'='143996756', 'ndv'='1107525', 'num_nulls'='2879192', 'min_value'='0.00', 'max_value'='26955.24', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2881314', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amt_inc_tax set stats ('row_count'='143996756', 'ndv'='1544502', 'num_nulls'='2881886', 'min_value'='0.00', 'max_value'='30418.06', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2883215', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_buy_potential set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='0-500', 
'max_value'='Unknown', 'data_size'='54000') - """ - - sql """ - alter table customer_address modify column ca_address_id set stats ('row_count'='6000000', 'ndv'='5984931', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPEAA', 'data_size'='96000000') - """ - - sql """ - alter table customer_address modify column ca_address_sk set stats ('row_count'='6000000', 'ndv'='6015811', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000', 'data_size'='48000000') - """ - - sql """ - alter table customer_address modify column ca_country set stats ('row_count'='6000000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='75661794') - """ - - sql """ - alter table customer_address modify column ca_location_type set stats ('row_count'='6000000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='single family', 'data_size'='52372545') - """ - - sql """ - alter table customer_address modify column ca_street_number set stats ('row_count'='6000000', 'ndv'='1002', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='16837336') - """ - - sql """ - alter table customer_address modify column ca_suite_number set stats ('row_count'='6000000', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='45911575') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_id set stats ('row_count'='30000', 'ndv'='29953', 'num_nulls'='0', 'min_value'='AAAAAAAAAAABAAAA', 'max_value'='AAAAAAAAPPPGAAAA', 'data_size'='480000') - """ - - sql """ - alter table item modify column i_rec_end_date set stats ('row_count'='300000', 'ndv'='3', 'num_nulls'='150000', 'min_value'='1999-10-27', 'max_value'='2001-10-26', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239971', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table 
web_returns modify column wr_reversed_charge set stats ('row_count'='71997522', 'ndv'='692680', 'num_nulls'='3239546', 'min_value'='0.00', 'max_value'='23194.77', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_state set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='0', 'min_value'='AL', 'max_value'='WV', 'data_size'='108') - """ - - sql """ - alter table promotion modify column p_end_date_sk set stats ('row_count'='1500', 'ndv'='683', 'num_nulls'='18', 'min_value'='2450113', 'max_value'='2450967', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_bill_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='180139', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_ship_cost set stats ('row_count'='720000376', 'ndv'='567477', 'num_nulls'='180084', 'min_value'='0.00', 'max_value'='14950.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_addr_sk set stats ('row_count'='720000376', 'ndv'='6015811', 'num_nulls'='179848', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_mode_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180017', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_warehouse_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180105', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_company_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_gmt_offset set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='6', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='4008') - """ - - sql """ - 
alter table store modify column s_manager set stats ('row_count'='1002', 'ndv'='739', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Clifton', 'data_size'='12649') - """ - - sql """ - alter table store modify column s_street_number set stats ('row_count'='1002', 'ndv'='521', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='2874') - """ - - sql """ - alter table time_dim modify column t_meal_time set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='lunch', 'data_size'='248400') - """ - - sql """ - alter table time_dim modify column t_time set stats ('row_count'='86400', 'ndv'='86684', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_creation_date_sk set stats ('row_count'='3000', 'ndv'='199', 'num_nulls'='33', 'min_value'='2450604', 'max_value'='2450815', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_customer_sk set stats ('row_count'='3000', 'ndv'='713', 'num_nulls'='2147', 'min_value'='9522', 'max_value'='11995685', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_max_ad_count set stats ('row_count'='3000', 'ndv'='5', 'num_nulls'='31', 'min_value'='0', 'max_value'='4', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_url set stats ('row_count'='3000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='http://www.foo.com', 'data_size'='53406') - """ - - sql """ - alter table store_returns modify column sr_refunded_cash set stats ('row_count'='287999764', 'ndv'='928470', 'num_nulls'='10081294', 'min_value'='0.00', 'max_value'='18173.96', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_tax set stats ('row_count'='287999764', 'ndv'='117247', 'num_nulls'='10081332', 'min_value'='0.00', 'max_value'='1682.04', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify 
column ss_customer_sk set stats ('row_count'='2879987999', 'ndv'='12157481', 'num_nulls'='129590766', 'min_value'='1', 'max_value'='12000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_hdemo_sk set stats ('row_count'='2879987999', 'ndv'='7251', 'num_nulls'='129594559', 'min_value'='1', 'max_value'='7200', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_store_sk set stats ('row_count'='2879987999', 'ndv'='499', 'num_nulls'='129572050', 'min_value'='1', 'max_value'='1000', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table customer modify column c_first_name set stats ('row_count'='12000000', 'ndv'='5140', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma', 'data_size'='67593278') - """ - - sql """ - alter table customer modify column c_first_sales_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='419856', 'min_value'='2448998', 'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_first_shipto_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='420769', 'min_value'='2449028', 'max_value'='2452678', 'data_size'='96000000') - """ - - sql """ - alter table customer_demographics modify column cd_dep_college_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_dow set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 
'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_qoy set stats ('row_count'='73049', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_street_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Elm', 'data_size'='176') - """ - - sql """ - alter table warehouse modify column w_suite_number set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite X', 'data_size'='150') - """ - - sql """ - alter table catalog_sales modify column cs_bill_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7202134', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7198837', 'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ext_ship_cost set stats ('row_count'='1439980416', 'ndv'='573238', 'num_nulls'='7202537', 'min_value'='0.00', 'max_value'='14994.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='California', 'max_value'='Pacific Northwest_2', 'data_size'='572') - """ - - sql """ - alter table call_center modify column cc_street_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='1st', 'max_value'='Willow', 
'data_size'='356') - """ - - sql """ - alter table call_center modify column cc_zip set stats ('row_count'='42', 'ndv'='19', 'num_nulls'='0', 'min_value'='18605', 'max_value'='98048', 'data_size'='210') - """ - - sql """ - alter table inventory modify column inv_quantity_on_hand set stats ('row_count'='783000000', 'ndv'='1006', 'num_nulls'='39153758', 'min_value'='0', 'max_value'='1000', 'data_size'='3132000000') - """ - - sql """ - alter table catalog_returns modify column cr_catalog_page_sk set stats ('row_count'='143996756', 'ndv'='17005', 'num_nulls'='2882502', 'min_value'='1', 'max_value'='25207', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_income_band_sk set stats ('row_count'='7200', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='57600') - """ - - sql """ - alter table catalog_page modify column cp_description set stats ('row_count'='30000', 'ndv'='30141', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters worry both workers. Fascinating characters take cheap never alive studies. 
Direct, old', 'data_size'='2215634') - """ - - sql """ - alter table item modify column i_item_id set stats ('row_count'='300000', 'ndv'='150851', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPBAAA', 'data_size'='4800000') - """ - - sql """ - alter table web_returns modify column wr_account_credit set stats ('row_count'='71997522', 'ndv'='683955', 'num_nulls'='3241972', 'min_value'='0.00', 'max_value'='23166.33', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_net_loss set stats ('row_count'='71997522', 'ndv'='815608', 'num_nulls'='3240573', 'min_value'='0.50', 'max_value'='15887.84', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt set stats ('row_count'='71997522', 'ndv'='808311', 'num_nulls'='3238405', 'min_value'='0.00', 'max_value'='29191.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt_inc_tax set stats ('row_count'='71997522', 'ndv'='1359913', 'num_nulls'='3239765', 'min_value'='0.00', 'max_value'='30393.01', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_quantity set stats ('row_count'='71997522', 'ndv'='100', 'num_nulls'='3238643', 'min_value'='1', 'max_value'='100', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239658', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_customer_sk set stats ('row_count'='71997522', 'ndv'='12119220', 'num_nulls'='3237281', 'min_value'='1', 'max_value'='12000000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_mkt_desc set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Acres see else children. Mutual too', 'max_value'='Windows increase to a differences. 
Other parties might in', 'data_size'='3473') - """ - - sql """ - alter table web_site modify column web_mkt_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='1', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_rec_end_date set stats ('row_count'='54', 'ndv'='3', 'num_nulls'='27', 'min_value'='1999-08-16', 'max_value'='2001-08-15', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_site_id set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='864') - """ - - sql """ - alter table web_site modify column web_street_type set stats ('row_count'='54', 'ndv'='20', 'num_nulls'='0', 'min_value'='Ave', 'max_value'='Wy', 'data_size'='208') - """ - - sql """ - alter table promotion modify column p_channel_demo set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_channel_details set stats ('row_count'='1500', 'ndv'='1490', 'num_nulls'='0', 'min_value'='', 'max_value'='Young, valuable companies watch walls. 
Payments can flour', 'data_size'='59126') - """ - - sql """ - alter table promotion modify column p_channel_event set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_discount_active set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1473') - """ - - sql """ - alter table promotion modify column p_promo_sk set stats ('row_count'='1500', 'ndv'='1489', 'num_nulls'='0', 'min_value'='1', 'max_value'='1500', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_purpose set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='10374') - """ - - sql """ - alter table web_sales modify column ws_bill_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='179788', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_date_sk set stats ('row_count'='720000376', 'ndv'='1820', 'num_nulls'='179921', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_site_sk set stats ('row_count'='720000376', 'ndv'='54', 'num_nulls'='179930', 'min_value'='1', 'max_value'='54', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_city set stats ('row_count'='1002', 'ndv'='55', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='9238') - """ - - sql """ - alter table store modify column s_company_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='7', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_county set stats ('row_count'='1002', 'ndv'='28', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='14291') - """ - - sql """ - alter table store 
modify column s_geography_class set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6972') - """ - - sql """ - alter table store modify column s_hours set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='8AM-8AM', 'data_size'='7088') - """ - - sql """ - alter table store modify column s_store_id set stats ('row_count'='1002', 'ndv'='501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPBAAAAA', 'data_size'='16032') - """ - - sql """ - alter table store modify column s_zip set stats ('row_count'='1002', 'ndv'='354', 'num_nulls'='0', 'min_value'='', 'max_value'='99454', 'data_size'='4975') - """ - - sql """ - alter table time_dim modify column t_am_pm set stats ('row_count'='86400', 'ndv'='2', 'num_nulls'='0', 'min_value'='AM', 'max_value'='PM', 'data_size'='172800') - """ - - sql """ - alter table time_dim modify column t_minute set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_web_page_id set stats ('row_count'='3000', 'ndv'='1501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPKAAAAA', 'data_size'='48000') - """ - - sql """ - alter table web_page modify column wp_web_page_sk set stats ('row_count'='3000', 'ndv'='2984', 'num_nulls'='0', 'min_value'='1', 'max_value'='3000', 'data_size'='24000') - """ - - sql """ - alter table store_returns modify column sr_return_amt set stats ('row_count'='287999764', 'ndv'='671228', 'num_nulls'='10080055', 'min_value'='0.00', 'max_value'='19434.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_returned_date_sk set stats ('row_count'='287999764', 'ndv'='2010', 'num_nulls'='10079607', 'min_value'='2450820', 'max_value'='2452822', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_tax 
set stats ('row_count'='2879987999', 'ndv'='149597', 'num_nulls'='129588732', 'min_value'='0.00', 'max_value'='1797.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_current_cdemo_sk set stats ('row_count'='12000000', 'ndv'='1913901', 'num_nulls'='419895', 'min_value'='1', 'max_value'='1920800', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_customer_id set stats ('row_count'='12000000', 'ndv'='11921032', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPKAA', 'data_size'='192000000') - """ - - sql """ - alter table date_dim modify column d_current_day set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_current_month set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date set stats ('row_count'='73049', 'ndv'='73250', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='2100-01-01', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_moy set stats ('row_count'='73049', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_gmt_offset set stats ('row_count'='20', 'ndv'='3', 'num_nulls'='1', 'min_value'='-7.00', 'max_value'='-5.00', 'data_size'='80') - """ - - sql """ - alter table warehouse modify column w_warehouse_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table warehouse modify column w_warehouse_sq_ft set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='1', 'min_value'='73065', 'max_value'='977787', 'data_size'='80') - """ - - sql """ - alter table catalog_sales modify column cs_ext_sales_price set stats 
('row_count'='1439980416', 'ndv'='1100662', 'num_nulls'='7199625', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='393180', 'num_nulls'='7199876', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_item_sk set stats ('row_count'='1439980416', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_tax set stats ('row_count'='1439980416', 'ndv'='2422238', 'num_nulls'='7200702', 'min_value'='0.00', 'max_value'='32376.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_date_sk set stats ('row_count'='1439980416', 'ndv'='1933', 'num_nulls'='7200707', 'min_value'='2450817', 'max_value'='2452744', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_warehouse_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7200688', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_division set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_division_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='164') - """ - - sql """ - alter table call_center modify column cc_manager set stats ('row_count'='42', 'ndv'='28', 'num_nulls'='0', 'min_value'='Alden Snyder', 'max_value'='Wayne Ray', 'data_size'='519') - """ - - sql """ - alter table call_center modify column cc_rec_start_date set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='2002-01-01', 
'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_call_center_sk set stats ('row_count'='143996756', 'ndv'='42', 'num_nulls'='2881668', 'min_value'='1', 'max_value'='42', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_net_loss set stats ('row_count'='143996756', 'ndv'='911034', 'num_nulls'='2881704', 'min_value'='0.50', 'max_value'='16095.08', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_customer_sk set stats ('row_count'='143996756', 'ndv'='12156363', 'num_nulls'='2879017', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882107', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_customer_sk set stats ('row_count'='143996756', 'ndv'='12157481', 'num_nulls'='2879023', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table customer_address modify column ca_gmt_offset set stats ('row_count'='6000000', 'ndv'='6', 'num_nulls'='180219', 'min_value'='-10.00', 'max_value'='-5.00', 'data_size'='24000000') - """ - - sql """ - alter table item modify column i_color set stats ('row_count'='300000', 'ndv'='93', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow', 'data_size'='1610293') - """ - - sql """ - alter table item modify column i_manufact set stats ('row_count'='300000', 'ndv'='1004', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripri', 'data_size'='3379693') - """ - - sql """ - alter table item modify column i_product_name set stats ('row_count'='300000', 'ndv'='294994', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripripripriought', 'data_size'='6849199') - """ - - sql """ - alter table web_returns modify column wr_returned_time_sk 
set stats ('row_count'='71997522', 'ndv'='87677', 'num_nulls'='3238574', 'min_value'='0', 'max_value'='86399', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_manager set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='William Young', 'data_size'='658') - """ - - sql """ - alter table web_site modify column web_mkt_class set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='Written, political plans show to the models. T', 'data_size'='1822') - """ - - sql """ - alter table web_site modify column web_rec_start_date set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='2', 'min_value'='1997-08-16', 'max_value'='2001-08-16', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_street_number set stats ('row_count'='54', 'ndv'='36', 'num_nulls'='0', 'min_value'='', 'max_value'='983', 'data_size'='154') - """ - - sql """ - alter table promotion modify column p_channel_catalog set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_promo_id set stats ('row_count'='1500', 'ndv'='1519', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPEAAAAA', 'data_size'='24000') - """ - - sql """ - alter table web_sales modify column ws_bill_customer_sk set stats ('row_count'='720000376', 'ndv'='12103729', 'num_nulls'='179817', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_list_price set stats ('row_count'='720000376', 'ndv'='29396', 'num_nulls'='180053', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_sales_price set stats ('row_count'='720000376', 'ndv'='29288', 'num_nulls'='180005', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - 
alter table web_sales modify column ws_ship_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='179824', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_closed_date_sk set stats ('row_count'='1002', 'ndv'='163', 'num_nulls'='729', 'min_value'='2450820', 'max_value'='2451313', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_division_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='6', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_market_desc set stats ('row_count'='1002', 'ndv'='765', 'num_nulls'='0', 'min_value'='', 'max_value'='Yesterday left factors handle continuing co', 'data_size'='57638') - """ - - sql """ - alter table store modify column s_market_id set stats ('row_count'='1002', 'ndv'='10', 'num_nulls'='8', 'min_value'='1', 'max_value'='10', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_state set stats ('row_count'='1002', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='WV', 'data_size'='1994') - """ - - sql """ - alter table store modify column s_store_sk set stats ('row_count'='1002', 'ndv'='988', 'num_nulls'='0', 'min_value'='1', 'max_value'='1002', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_street_name set stats ('row_count'='1002', 'ndv'='549', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Oak', 'data_size'='8580') - """ - - sql """ - alter table web_page modify column wp_access_date_sk set stats ('row_count'='3000', 'ndv'='101', 'num_nulls'='31', 'min_value'='2452548', 'max_value'='2452648', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_char_count set stats ('row_count'='3000', 'ndv'='1883', 'num_nulls'='42', 'min_value'='303', 'max_value'='8523', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_addr_sk set stats 
('row_count'='287999764', 'ndv'='6015811', 'num_nulls'='10082311', 'min_value'='1', 'max_value'='6000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_return_time_sk set stats ('row_count'='287999764', 'ndv'='32660', 'num_nulls'='10082805', 'min_value'='28799', 'max_value'='61199', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_sk set stats ('row_count'='287999764', 'ndv'='499', 'num_nulls'='10081871', 'min_value'='1', 'max_value'='1000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_coupon_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sales_price set stats ('row_count'='2879987999', 'ndv'='19780', 'num_nulls'='129598061', 'min_value'='0.00', 'max_value'='200.00', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_country set stats ('row_count'='12000000', 'ndv'='211', 'num_nulls'='0', 'min_value'='', 'max_value'='ZIMBABWE', 'data_size'='100750845') - """ - - sql """ - alter table customer modify column c_birth_month set stats ('row_count'='12000000', 'ndv'='12', 'num_nulls'='419629', 'min_value'='1', 'max_value'='12', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_customer_sk set stats ('row_count'='12000000', 'ndv'='12157481', 'num_nulls'='0', 'min_value'='1', 'max_value'='12000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_email_address set stats ('row_count'='12000000', 'ndv'='11642077', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma.Young@aDhzZzCzYN.edu', 'data_size'='318077849') - """ - - sql """ - alter table customer modify column c_last_review_date_sk set stats ('row_count'='12000000', 'ndv'='366', 'num_nulls'='419900', 'min_value'='2452283', 
'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_preferred_cust_flag set stats ('row_count'='12000000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='11580510') - """ - - sql """ - alter table dbgen_version modify column dv_version set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='3.2.0', 'max_value'='3.2.0', 'data_size'='5') - """ - - sql """ - alter table customer_demographics modify column cd_purchase_estimate set stats ('row_count'='1920800', 'ndv'='20', 'num_nulls'='0', 'min_value'='500', 'max_value'='10000', 'data_size'='7683200') - """ - - sql """ - alter table reason modify column r_reason_id set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPDAAAAAA', 'data_size'='1040') - """ - - sql """ - alter table reason modify column r_reason_sk set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='1', 'max_value'='65', 'data_size'='520') - """ - - sql """ - alter table date_dim modify column d_current_week set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_first_dom set stats ('row_count'='73049', 'ndv'='2410', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2488070', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_fy_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_last_dom set stats ('row_count'='73049', 'ndv'='2419', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2488372', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_month_seq set stats ('row_count'='73049', 'ndv'='2398', 'num_nulls'='0', 'min_value'='0', 'max_value'='2400', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_quarter_name set stats ('row_count'='73049', 'ndv'='799', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='2100Q1', 'data_size'='438294') - """ - - sql """ - alter table warehouse modify column w_county set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='Bronx County', 'max_value'='Ziebach County', 'data_size'='291') - """ - - sql """ - alter table warehouse modify column w_street_number set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='0', 'min_value'='', 'max_value'='957', 'data_size'='54') - """ - - sql """ - alter table warehouse modify column w_warehouse_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Therefore urg', 'data_size'='307') - """ - - sql """ - alter table catalog_sales modify column cs_ext_discount_amt set stats ('row_count'='1439980416', 'ndv'='1100115', 'num_nulls'='7201054', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship_tax set stats ('row_count'='1439980416', 'ndv'='3312360', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46593.36', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_promo_sk set stats ('row_count'='1439980416', 'ndv'='1489', 'num_nulls'='7202844', 'min_value'='1', 'max_value'='1500', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_id set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='672') - """ - - sql """ - alter table call_center modify column cc_employees set stats ('row_count'='42', 'ndv'='30', 'num_nulls'='0', 'min_value'='69020', 'max_value'='6879074', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_suite_number set stats ('row_count'='42', 'ndv'='18', 'num_nulls'='0', 'min_value'='Suite 0', 
'max_value'='Suite W', 'data_size'='326') - """ - - sql """ - alter table catalog_returns modify column cr_item_sk set stats ('row_count'='143996756', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reason_sk set stats ('row_count'='143996756', 'ndv'='65', 'num_nulls'='2881950', 'min_value'='1', 'max_value'='65', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_ship_cost set stats ('row_count'='143996756', 'ndv'='483467', 'num_nulls'='2883436', 'min_value'='0.00', 'max_value'='14273.28', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_ship_mode_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2879879', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_store_credit set stats ('row_count'='143996756', 'ndv'='802237', 'num_nulls'='2880469', 'min_value'='0.00', 'max_value'='23215.15', 'data_size'='575987024') - """ - - sql """ - alter table customer_address modify column ca_city set stats ('row_count'='6000000', 'ndv'='977', 'num_nulls'='0', 'min_value'='', 'max_value'='Zion', 'data_size'='52096290') - """ - - sql """ - alter table customer_address modify column ca_state set stats ('row_count'='6000000', 'ndv'='52', 'num_nulls'='0', 'min_value'='', 'max_value'='WY', 'data_size'='11640128') - """ - - sql """ - alter table customer_address modify column ca_street_name set stats ('row_count'='6000000', 'ndv'='8173', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Woodland', 'data_size'='50697257') - """ - - sql """ - alter table customer_address modify column ca_street_type set stats ('row_count'='6000000', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='24441630') - """ - - sql """ - alter table catalog_page modify column cp_catalog_number set 
stats ('row_count'='30000', 'ndv'='109', 'num_nulls'='297', 'min_value'='1', 'max_value'='109', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_number set stats ('row_count'='30000', 'ndv'='279', 'num_nulls'='294', 'min_value'='1', 'max_value'='277', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_sk set stats ('row_count'='30000', 'ndv'='30439', 'num_nulls'='0', 'min_value'='1', 'max_value'='30000', 'data_size'='240000') - """ - - sql """ - alter table catalog_page modify column cp_start_date_sk set stats ('row_count'='30000', 'ndv'='91', 'num_nulls'='286', 'min_value'='2450815', 'max_value'='2453005', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_rec_start_date set stats ('row_count'='300000', 'ndv'='4', 'num_nulls'='784', 'min_value'='1997-10-27', 'max_value'='2001-10-27', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_units set stats ('row_count'='300000', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='1253652') - """ - - sql """ - alter table web_returns modify column wr_refunded_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238545', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_ship_cost set stats ('row_count'='71997522', 'ndv'='451263', 'num_nulls'='3239048', 'min_value'='0.00', 'max_value'='14352.10', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returned_date_sk set stats ('row_count'='71997522', 'ndv'='2188', 'num_nulls'='3239259', 'min_value'='2450819', 'max_value'='2453002', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3239192', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - 
""" - - sql """ - alter table web_site modify column web_suite_number set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Suite 100', 'max_value'='Suite Y', 'data_size'='430') - """ - - sql """ - alter table promotion modify column p_start_date_sk set stats ('row_count'='1500', 'ndv'='685', 'num_nulls'='23', 'min_value'='2450096', 'max_value'='2450915', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_coupon_amt set stats ('row_count'='720000376', 'ndv'='1505315', 'num_nulls'='179933', 'min_value'='0.00', 'max_value'='28824.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_wholesale_cost set stats ('row_count'='720000376', 'ndv'='393180', 'num_nulls'='180060', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship set stats ('row_count'='720000376', 'ndv'='2414838', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='44263.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_date_sk set stats ('row_count'='720000376', 'ndv'='1952', 'num_nulls'='180011', 'min_value'='2450817', 'max_value'='2452762', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_page_sk set stats ('row_count'='720000376', 'ndv'='2984', 'num_nulls'='179732', 'min_value'='1', 'max_value'='3000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_country set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='12961') - """ - - sql """ - alter table store modify column s_store_name set stats ('row_count'='1002', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='3916') - """ - - sql """ - alter table time_dim modify column t_second set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 
'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_sub_shift set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='afternoon', 'max_value'='night', 'data_size'='597600') - """ - - sql """ - alter table web_page modify column wp_image_count set stats ('row_count'='3000', 'ndv'='7', 'num_nulls'='26', 'min_value'='1', 'max_value'='7', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_type set stats ('row_count'='3000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='welcome', 'data_size'='18867') - """ - - sql """ - alter table store_returns modify column sr_customer_sk set stats ('row_count'='287999764', 'ndv'='12157481', 'num_nulls'='10081624', 'min_value'='1', 'max_value'='12000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_hdemo_sk set stats ('row_count'='287999764', 'ndv'='7251', 'num_nulls'='10083275', 'min_value'='1', 'max_value'='7200', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_addr_sk set stats ('row_count'='2879987999', 'ndv'='6015811', 'num_nulls'='129589799', 'min_value'='1', 'max_value'='6000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_item_sk set stats ('row_count'='2879987999', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_quantity set stats ('row_count'='2879987999', 'ndv'='100', 'num_nulls'='129584258', 'min_value'='1', 'max_value'='100', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ticket_number set stats ('row_count'='2879987999', 'ndv'='238830448', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='9905', 
'num_nulls'='129590273', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='11519951996') - """ - - sql """ - alter table ship_mode modify column sm_type set stats ('row_count'='20', 'ndv'='6', 'num_nulls'='0', 'min_value'='EXPRESS', 'max_value'='TWO DAY', 'data_size'='150') - """ - - sql """ - alter table customer modify column c_current_addr_sk set stats ('row_count'='12000000', 'ndv'='5243359', 'num_nulls'='0', 'min_value'='3', 'max_value'='6000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_last_name set stats ('row_count'='12000000', 'ndv'='4990', 'num_nulls'='0', 'min_value'='', 'max_value'='Zuniga', 'data_size'='70991730') - """ - - sql """ - alter table dbgen_version modify column dv_cmdline_args set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'max_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'data_size'='86') - """ - - sql """ - alter table date_dim modify column d_current_quarter set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date_sk set stats ('row_count'='73049', 'ndv'='73042', 'num_nulls'='0', 'min_value'='2415022', 'max_value'='2488070', 'data_size'='584392') - """ - - sql """ - alter table date_dim modify column d_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_country set stats ('row_count'='20', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='260') - """ - - sql """ - alter table warehouse modify column w_state set stats ('row_count'='20', 'ndv'='13', 'num_nulls'='0', 'min_value'='AL', 'max_value'='TN', 'data_size'='40') - """ - - sql """ - alter 
table catalog_sales modify column cs_bill_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7199539', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201919', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid set stats ('row_count'='1439980416', 'ndv'='1809875', 'num_nulls'='7197668', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7198232', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_mode_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7201083', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_date_sk set stats ('row_count'='1439980416', 'ndv'='1835', 'num_nulls'='7203326', 'min_value'='2450815', 'max_value'='2452654', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_time_sk set stats ('row_count'='1439980416', 'ndv'='87677', 'num_nulls'='7201329', 'min_value'='0', 'max_value'='86399', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='9905', 'num_nulls'='7201098', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_company_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='160') - """ - - sql """ - alter table call_center modify column cc_market_manager set 
stats ('row_count'='42', 'ndv'='35', 'num_nulls'='0', 'min_value'='Cesar Allen', 'max_value'='William Larsen', 'data_size'='524') - """ - - sql """ - alter table call_center modify column cc_mkt_id set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_street_type set stats ('row_count'='42', 'ndv'='11', 'num_nulls'='0', 'min_value'='Avenue', 'max_value'='Way', 'data_size'='184') - """ - - sql """ - alter table catalog_returns modify column cr_return_tax set stats ('row_count'='143996756', 'ndv'='149828', 'num_nulls'='2881611', 'min_value'='0.00', 'max_value'='2511.58', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2880543', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882692', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reversed_charge set stats ('row_count'='143996756', 'ndv'='802509', 'num_nulls'='2881215', 'min_value'='0.00', 'max_value'='24033.84', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_warehouse_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2882192', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_demo_sk set stats ('row_count'='7200', 'ndv'='7251', 'num_nulls'='0', 'min_value'='1', 'max_value'='7200', 'data_size'='57600') - """ - - sql """ - alter table household_demographics modify column hd_vehicle_count set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='-1', 'max_value'='4', 'data_size'='28800') 
- """ - - sql """ - alter table customer_address modify column ca_zip set stats ('row_count'='6000000', 'ndv'='9253', 'num_nulls'='0', 'min_value'='', 'max_value'='99981', 'data_size'='29097610') - """ - - sql """ - alter table income_band modify column ib_income_band_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table catalog_page modify column cp_type set stats ('row_count'='30000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='quarterly', 'data_size'='227890') - """ - - sql """ - alter table item modify column i_brand set stats ('row_count'='300000', 'ndv'='714', 'num_nulls'='0', 'min_value'='', 'max_value'='univunivamalg #9', 'data_size'='4834917') - """ - - sql """ - alter table item modify column i_formulation set stats ('row_count'='300000', 'ndv'='224757', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow98911509228741', 'data_size'='5984460') - """ - - sql """ - alter table item modify column i_item_desc set stats ('row_count'='300000', 'ndv'='217721', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters used to save quite colour', 'data_size'='30093342') - """ - - sql """ - alter table web_returns modify column wr_fee set stats ('row_count'='71997522', 'ndv'='9958', 'num_nulls'='3238926', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_item_sk set stats ('row_count'='71997522', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_reason_sk set stats ('row_count'='71997522', 'ndv'='65', 'num_nulls'='3238897', 'min_value'='1', 'max_value'='65', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_customer_sk set stats ('row_count'='71997522', 'ndv'='12117831', 'num_nulls'='3242433', 'min_value'='1', 'max_value'='12000000', 
'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_city set stats ('row_count'='54', 'ndv'='31', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='491') - """ - - sql """ - alter table web_site modify column web_close_date_sk set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='10', 'min_value'='2441265', 'max_value'='2446218', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_company_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_company_name set stats ('row_count'='54', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='203') - """ - - sql """ - alter table web_site modify column web_county set stats ('row_count'='54', 'ndv'='25', 'num_nulls'='0', 'min_value'='', 'max_value'='Williamson County', 'data_size'='762') - """ - - sql """ - alter table web_site modify column web_name set stats ('row_count'='54', 'ndv'='10', 'num_nulls'='0', 'min_value'='', 'max_value'='site_8', 'data_size'='312') - """ - - sql """ - alter table web_site modify column web_open_date_sk set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='1', 'min_value'='2450373', 'max_value'='2450807', 'data_size'='432') - """ - - sql """ - alter table promotion modify column p_channel_dmail set stats ('row_count'='1500', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='1483') - """ - - sql """ - alter table promotion modify column p_channel_press set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_channel_radio set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_cost set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='18', 'min_value'='1000.00', 'max_value'='1000.00', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_ext_tax set stats ('row_count'='720000376', 'ndv'='211413', 'num_nulls'='179695', 'min_value'='0.00', 'max_value'='2682.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_item_sk set stats ('row_count'='720000376', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_net_paid set stats ('row_count'='720000376', 'ndv'='1749360', 'num_nulls'='179970', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship_tax set stats ('row_count'='720000376', 'ndv'='3224829', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46004.19', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_tax set stats ('row_count'='720000376', 'ndv'='2354996', 'num_nulls'='179972', 'min_value'='0.00', 'max_value'='32492.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_order_number set stats ('row_count'='720000376', 'ndv'='60401176', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_quantity set stats ('row_count'='720000376', 'ndv'='100', 'num_nulls'='179781', 'min_value'='1', 'max_value'='100', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='180290', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_time_sk set stats ('row_count'='720000376', 'ndv'='87677', 'num_nulls'='179980', 'min_value'='0', 'max_value'='86399', 'data_size'='5760003008') - """ - - sql 
""" - alter table store modify column s_street_type set stats ('row_count'='1002', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='4189') - """ - - sql """ - alter table web_page modify column wp_autogen_flag set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='2962') - """ - - sql """ - alter table web_page modify column wp_rec_start_date set stats ('row_count'='3000', 'ndv'='4', 'num_nulls'='29', 'min_value'='1997-09-03', 'max_value'='2001-09-03', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_net_loss set stats ('row_count'='287999764', 'ndv'='714210', 'num_nulls'='10080716', 'min_value'='0.50', 'max_value'='10776.08', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_amt_inc_tax set stats ('row_count'='287999764', 'ndv'='1259368', 'num_nulls'='10076879', 'min_value'='0.00', 'max_value'='20454.63', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_quantity set stats ('row_count'='287999764', 'ndv'='100', 'num_nulls'='10082815', 'min_value'='1', 'max_value'='100', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_ship_cost set stats ('row_count'='287999764', 'ndv'='355844', 'num_nulls'='10081927', 'min_value'='0.00', 'max_value'='9767.34', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reversed_charge set stats ('row_count'='287999764', 'ndv'='700618', 'num_nulls'='10085976', 'min_value'='0.00', 'max_value'='17339.42', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify column ss_net_paid_inc_tax set stats ('row_count'='2879987999', 'ndv'='1681767', 'num_nulls'='129609050', 'min_value'='0.00', 'max_value'='21769.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_day set stats ('row_count'='12000000', 
'ndv'='31', 'num_nulls'='420361', 'min_value'='1', 'max_value'='31', 'data_size'='48000000') - """ - - sql """ - alter table customer_demographics modify column cd_credit_rating set stats ('row_count'='1920800', 'ndv'='4', 'num_nulls'='0', 'min_value'='Good', 'max_value'='Unknown', 'data_size'='13445600') - """ - - sql """ - alter table customer_demographics modify column cd_demo_sk set stats ('row_count'='1920800', 'ndv'='1916366', 'num_nulls'='0', 'min_value'='1', 'max_value'='1920800', 'data_size'='15366400') - """ - - sql """ - alter table customer_demographics modify column cd_dep_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table customer_demographics modify column cd_education_status set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='2 yr Degree', 'max_value'='Unknown', 'data_size'='18384800') - """ - - sql """ - alter table customer_demographics modify column cd_gender set stats ('row_count'='1920800', 'ndv'='2', 'num_nulls'='0', 'min_value'='F', 'max_value'='M', 'data_size'='1920800') - """ - - sql """ - alter table customer_demographics modify column cd_marital_status set stats ('row_count'='1920800', 'ndv'='5', 'num_nulls'='0', 'min_value'='D', 'max_value'='W', 'data_size'='1920800') - """ - - sql """ - alter table date_dim modify column d_date_id set stats ('row_count'='73049', 'ndv'='72907', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='1168784') - """ - - sql """ - alter table date_dim modify column d_fy_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column 
w_warehouse_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table catalog_sales modify column cs_ext_list_price set stats ('row_count'='1439980416', 'ndv'='1160303', 'num_nulls'='7199542', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_tax set stats ('row_count'='1439980416', 'ndv'='215267', 'num_nulls'='7200412', 'min_value'='0.00', 'max_value'='2673.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_quantity set stats ('row_count'='1439980416', 'ndv'='100', 'num_nulls'='7202885', 'min_value'='1', 'max_value'='100', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7200151', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201507', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_company set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_mkt_desc set stats ('row_count'='42', 'ndv'='33', 'num_nulls'='0', 'min_value'='Arms increase controversial, present so', 'max_value'='Young tests could buy comfortable, local users; o', 'data_size'='2419') - """ - - sql """ - alter table call_center modify column cc_open_date_sk set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='2450794', 'max_value'='2451146', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_rec_end_date set stats ('row_count'='42', 
'ndv'='3', 'num_nulls'='21', 'min_value'='2000-01-01', 'max_value'='2001-12-31', 'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_order_number set stats ('row_count'='143996756', 'ndv'='93476424', 'num_nulls'='0', 'min_value'='2', 'max_value'='160000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amount set stats ('row_count'='143996756', 'ndv'='882831', 'num_nulls'='2880424', 'min_value'='0.00', 'max_value'='28805.04', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_date_sk set stats ('row_count'='143996756', 'ndv'='2108', 'num_nulls'='0', 'min_value'='2450821', 'max_value'='2452924', 'data_size'='1151974048') - """ - - sql """ - alter table income_band modify column ib_upper_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='10000', 'max_value'='200000', 'data_size'='80') - """ - - sql """ - alter table catalog_page modify column cp_department set stats ('row_count'='30000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='DEPARTMENT', 'data_size'='297110') - """ - - sql """ - alter table catalog_page modify column cp_end_date_sk set stats ('row_count'='30000', 'ndv'='97', 'num_nulls'='302', 'min_value'='2450844', 'max_value'='2453186', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_brand_id set stats ('row_count'='300000', 'ndv'='951', 'num_nulls'='763', 'min_value'='1001001', 'max_value'='10016017', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_category set stats ('row_count'='300000', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='Women', 'data_size'='1766742') - """ - - sql """ - alter table item modify column i_class_id set stats ('row_count'='300000', 'ndv'='16', 'num_nulls'='722', 'min_value'='1', 'max_value'='16', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_item_sk set stats 
('row_count'='300000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2400000') - """ - - sql """ - alter table item modify column i_manufact_id set stats ('row_count'='300000', 'ndv'='1005', 'num_nulls'='761', 'min_value'='1', 'max_value'='1000', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_wholesale_cost set stats ('row_count'='300000', 'ndv'='7243', 'num_nulls'='740', 'min_value'='0.02', 'max_value'='89.49', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3240352', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_tax set stats ('row_count'='71997522', 'ndv'='137392', 'num_nulls'='3237729', 'min_value'='0.00', 'max_value'='2551.16', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238239', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_web_page_sk set stats ('row_count'='71997522', 'ndv'='2984', 'num_nulls'='3240387', 'min_value'='1', 'max_value'='3000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_class set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='371') - """ - - sql """ - alter table web_site modify column web_zip set stats ('row_count'='54', 'ndv'='32', 'num_nulls'='0', 'min_value'='14593', 'max_value'='99431', 'data_size'='270') - """ - - sql """ - alter table promotion modify column p_channel_email set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1480') - """ - - sql """ - alter table promotion modify column p_item_sk set stats 
('row_count'='1500', 'ndv'='1467', 'num_nulls'='19', 'min_value'='184', 'max_value'='299990', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_promo_name set stats ('row_count'='1500', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='5896') - """ - - sql """ - alter table web_sales modify column ws_ext_discount_amt set stats ('row_count'='720000376', 'ndv'='1093513', 'num_nulls'='179851', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_list_price set stats ('row_count'='720000376', 'ndv'='1160303', 'num_nulls'='179866', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_wholesale_cost set stats ('row_count'='720000376', 'ndv'='9905', 'num_nulls'='179834', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='2880001504') - """ - - sql """ - alter table store modify column s_market_manager set stats ('row_count'='1002', 'ndv'='732', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Perez', 'data_size'='12823') - """ - - sql """ - alter table store modify column s_number_employees set stats ('row_count'='1002', 'ndv'='101', 'num_nulls'='8', 'min_value'='200', 'max_value'='300', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_end_date set stats ('row_count'='1002', 'ndv'='3', 'num_nulls'='501', 'min_value'='1999-03-13', 'max_value'='2001-03-12', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_start_date set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='7', 'min_value'='1997-03-13', 'max_value'='2001-03-13', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_suite_number set stats ('row_count'='1002', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='7866') - """ - - sql """ - alter table time_dim modify column t_hour set stats 
('row_count'='86400', 'ndv'='24', 'num_nulls'='0', 'min_value'='0', 'max_value'='23', 'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_shift set stats ('row_count'='86400', 'ndv'='3', 'num_nulls'='0', 'min_value'='first', 'max_value'='third', 'data_size'='460800') - """ - - sql """ - alter table web_page modify column wp_link_count set stats ('row_count'='3000', 'ndv'='24', 'num_nulls'='27', 'min_value'='2', 'max_value'='25', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_rec_end_date set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='1500', 'min_value'='1999-09-03', 'max_value'='2001-09-02', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_cdemo_sk set stats ('row_count'='287999764', 'ndv'='1916366', 'num_nulls'='10076902', 'min_value'='1', 'max_value'='1920800', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_item_sk set stats ('row_count'='287999764', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_cdemo_sk set stats ('row_count'='2879987999', 'ndv'='1916366', 'num_nulls'='129602155', 'min_value'='1', 'max_value'='1920800', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_ext_discount_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='393180', 'num_nulls'='129595018', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_list_price set stats ('row_count'='2879987999', 'ndv'='19640', 'num_nulls'='129597020', 'min_value'='1.00', 'max_value'='200.00', 'data_size'='11519951996') - 
""" - - sql """ - alter table store_sales modify column ss_net_paid set stats ('row_count'='2879987999', 'ndv'='1288646', 'num_nulls'='129599407', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sold_date_sk set stats ('row_count'='2879987999', 'ndv'='1820', 'num_nulls'='129600843', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_sold_time_sk set stats ('row_count'='2879987999', 'ndv'='47252', 'num_nulls'='129593012', 'min_value'='28800', 'max_value'='75599', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_carrier set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AIRBORNE', 'max_value'='ZOUROS', 'data_size'='133') - """ - - sql """ - alter table customer modify column c_birth_year set stats ('row_count'='12000000', 'ndv'='69', 'num_nulls'='419584', 'min_value'='1924', 'max_value'='1992', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_login set stats ('row_count'='12000000', 'ndv'='1', 'num_nulls'='0', 'min_value'='', 'max_value'='', 'data_size'='0') - """ - - sql """ - alter table customer modify column c_salutation set stats ('row_count'='12000000', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='Sir', 'data_size'='37544445') - """ - - sql """ - alter table reason modify column r_reason_desc set stats ('row_count'='65', 'ndv'='64', 'num_nulls'='0', 'min_value'='Did not fit', 'max_value'='unauthoized purchase', 'data_size'='848') - """ - - sql """ - alter table date_dim modify column d_current_year set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_dom set stats ('row_count'='73049', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_same_day_lq set stats ('row_count'='73049', 'ndv'='72231', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2487978', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_weekend set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_zip set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='19231', 'max_value'='89275', 'data_size'='100') - """ - - sql """ - alter table catalog_sales modify column cs_catalog_page_sk set stats ('row_count'='1439980416', 'ndv'='17005', 'num_nulls'='7199032', 'min_value'='1', 'max_value'='25207', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_coupon_amt set stats ('row_count'='1439980416', 'ndv'='1578778', 'num_nulls'='7198116', 'min_value'='0.00', 'max_value'='28730.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_list_price set stats ('row_count'='1439980416', 'ndv'='29396', 'num_nulls'='7201549', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_profit set stats ('row_count'='1439980416', 'ndv'='2058398', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19962.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_order_number set stats ('row_count'='1439980416', 'ndv'='159051824', 'num_nulls'='0', 'min_value'='1', 'max_value'='160000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7201542', 
'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_sk set stats ('row_count'='42', 'ndv'='42', 'num_nulls'='0', 'min_value'='1', 'max_value'='42', 'data_size'='336') - """ - - sql """ - alter table call_center modify column cc_city set stats ('row_count'='42', 'ndv'='17', 'num_nulls'='0', 'min_value'='Antioch', 'max_value'='Spring Hill', 'data_size'='386') - """ - - sql """ - alter table call_center modify column cc_closed_date_sk set stats ('row_count'='42', 'ndv'='0', 'num_nulls'='42', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_gmt_offset set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_hours set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='8AM-12AM', 'max_value'='8AM-8AM', 'data_size'='300') - """ - - sql """ - alter table call_center modify column cc_street_number set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='38', 'max_value'='999', 'data_size'='120') - """ - - sql """ - alter table call_center modify column cc_tax_percentage set stats ('row_count'='42', 'ndv'='12', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='168') - """ - - sql """ - alter table inventory modify column inv_date_sk set stats ('row_count'='783000000', 'ndv'='261', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2452635', 'data_size'='6264000000') - """ - - sql """ - alter table inventory modify column inv_item_sk set stats ('row_count'='783000000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_fee set stats ('row_count'='143996756', 'ndv'='9958', 'num_nulls'='2882168', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='575987024') - """ - - sql """ - 
alter table catalog_returns modify column cr_return_quantity set stats ('row_count'='143996756', 'ndv'='100', 'num_nulls'='2878774', 'min_value'='1', 'max_value'='100', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_time_sk set stats ('row_count'='143996756', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_dep_count set stats ('row_count'='7200', 'ndv'='10', 'num_nulls'='0', 'min_value'='0', 'max_value'='9', 'data_size'='28800') - """ - - sql """ - alter table customer_address modify column ca_county set stats ('row_count'='6000000', 'ndv'='1825', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='81254984') - """ - - sql """ - alter table income_band modify column ib_lower_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='0', 'max_value'='190001', 'data_size'='80') - """ - - sql """ - alter table item modify column i_category_id set stats ('row_count'='300000', 'ndv'='10', 'num_nulls'='766', 'min_value'='1', 'max_value'='10', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_class set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='0', 'min_value'='', 'max_value'='womens watch', 'data_size'='2331199') - """ - - sql """ - alter table item modify column i_container set stats ('row_count'='300000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='2094652') - """ - - sql """ - alter table item modify column i_current_price set stats ('row_count'='300000', 'ndv'='9685', 'num_nulls'='775', 'min_value'='0.09', 'max_value'='99.99', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_manager_id set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='744', 'min_value'='1', 'max_value'='100', 'data_size'='1200000') - """ - - sql """ - alter table item modify column 
i_size set stats ('row_count'='300000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='small', 'data_size'='1296134') - """ - - sql """ - alter table web_returns modify column wr_order_number set stats ('row_count'='71997522', 'ndv'='42383708', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_cash set stats ('row_count'='71997522', 'ndv'='955369', 'num_nulls'='3240493', 'min_value'='0.00', 'max_value'='26992.92', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_country set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='689') - """ - - sql """ - alter table web_site modify column web_gmt_offset set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='1', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_market_manager set stats ('row_count'='54', 'ndv'='46', 'num_nulls'='0', 'min_value'='', 'max_value'='Zachery Oneil', 'data_size'='691') - """ - - sql """ - alter table web_site modify column web_site_sk set stats ('row_count'='54', 'ndv'='54', 'num_nulls'='0', 'min_value'='1', 'max_value'='54', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_street_name set stats ('row_count'='54', 'ndv'='53', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Ridge', 'data_size'='471') - """ - - sql """ - alter table web_site modify column web_tax_percentage set stats ('row_count'='54', 'ndv'='13', 'num_nulls'='1', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='216') - """ - - sql """ - alter table promotion modify column p_channel_tv set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_response_targe set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='27', 'min_value'='1', 'max_value'='1', 'data_size'='6000') - """ - - sql """ - alter table web_sales modify column ws_bill_addr_sk set stats ('row_count'='720000376', 'ndv'='6015742', 'num_nulls'='179648', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_sales_price set stats ('row_count'='720000376', 'ndv'='1091003', 'num_nulls'='180023', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_profit set stats ('row_count'='720000376', 'ndv'='2014057', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19840.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_promo_sk set stats ('row_count'='720000376', 'ndv'='1489', 'num_nulls'='180016', 'min_value'='1', 'max_value'='1500', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_customer_sk set stats ('row_count'='720000376', 'ndv'='12074547', 'num_nulls'='179966', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_division_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_floor_space set stats ('row_count'='1002', 'ndv'='752', 'num_nulls'='6', 'min_value'='5002549', 'max_value'='9997773', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_tax_precentage set stats ('row_count'='1002', 'ndv'='12', 'num_nulls'='8', 'min_value'='0.00', 'max_value'='0.11', 'data_size'='4008') - """ - - sql """ - alter table time_dim modify column t_time_id set stats ('row_count'='86400', 'ndv'='85663', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPAAAA', 'data_size'='1382400') - """ - - sql """ - alter table time_dim modify column t_time_sk set 
stats ('row_count'='86400', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='691200') - """ - - sql """ - alter table store_returns modify column sr_fee set stats ('row_count'='287999764', 'ndv'='9958', 'num_nulls'='10081860', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reason_sk set stats ('row_count'='287999764', 'ndv'='65', 'num_nulls'='10087936', 'min_value'='1', 'max_value'='65', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_credit set stats ('row_count'='287999764', 'ndv'='698161', 'num_nulls'='10077188', 'min_value'='0.00', 'max_value'='17792.48', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_ticket_number set stats ('row_count'='287999764', 'ndv'='168770768', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_list_price set stats ('row_count'='2879987999', 'ndv'='770971', 'num_nulls'='129593800', 'min_value'='1.00', 'max_value'='20000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_sales_price set stats ('row_count'='2879987999', 'ndv'='754248', 'num_nulls'='129589177', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_net_profit set stats ('row_count'='2879987999', 'ndv'='1497362', 'num_nulls'='129572933', 'min_value'='-10000.00', 'max_value'='9986.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_promo_sk set stats ('row_count'='2879987999', 'ndv'='1489', 'num_nulls'='129597096', 'min_value'='1', 'max_value'='1500', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_code set stats ('row_count'='20', 'ndv'='4', 'num_nulls'='0', 
'min_value'='AIR', 'max_value'='SURFACE', 'data_size'='87') - """ - - sql """ - alter table ship_mode modify column sm_contract set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='2mM8l', 'max_value'='yVfotg7Tio3MVhBg6Bkn', 'data_size'='252') - """ - - sql """ - alter table customer modify column c_current_hdemo_sk set stats ('row_count'='12000000', 'ndv'='7251', 'num_nulls'='418736', 'min_value'='1', 'max_value'='7200', 'data_size'='96000000') - """ - - sql """ - alter table dbgen_version modify column dv_create_date set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2023-07-06', 'max_value'='2023-07-06', 'data_size'='4') - """ - - sql """ - alter table dbgen_version modify column dv_create_time set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2017-05-13 00:00:00', 'max_value'='2017-05-13 00:00:00', 'data_size'='8') - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy deleted file mode 100644 index 82db5a725ce44a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(context.file) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select 
item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py deleted file mode 100644 index 60e9004c90e13b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. 
-if __name__ == '__main__': - with open('rf_prune.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf100/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../rf_prune/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py deleted file mode 100644 index 3fde2ac6936367..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. 
-if __name__ == '__main__': - with open('shape.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf100/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../shape/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl deleted file mode 100644 index b29318db8e806a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - - def ds = """{query}""" - qt_ds_shape_{--} """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl deleted file mode 100644 index c74e55d30c8965..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - def ds = """{query}""" - qt_ds_shape_{--} """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy deleted file mode 100644 index 4b99eafdea9012..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy +++ /dev/null @@ -1,4127 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - PARTITION BY RANGE(d_date_sk) - ( - PARTITION `ppast` values less than("2450815"), - PARTITION `p1998` values less than("2451180"), - PARTITION `p1999` 
values less than("2451545"), - PARTITION `p2000` values less than("2451911"), - PARTITION `p2001` values less than("2452276"), - PARTITION `p2002` values less than("2452641"), - PARTITION `p2003` values less than("2453006"), - PARTITION `pfuture` values less than("9999999") - ) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), 
- cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - 
cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = 
"1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - 
wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), 
- p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - 
s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number 
bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - 
DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, - c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - -sql ''' -alter table customer add constraint customer_pk primary key (c_customer_sk); -''' - -sql ''' -alter table customer add constraint customer_uk unique (c_customer_id); -''' - -sql ''' -alter table store_sales add constraint ss_fk foreign key(ss_customer_sk) references customer(c_customer_sk); -''' - -sql ''' -alter table web_sales add constraint ws_fk foreign key(ws_bill_customer_sk) references customer(c_customer_sk); -''' - -sql ''' -alter table catalog_sales add constraint cs_fk foreign key(cs_bill_customer_sk) references customer(c_customer_sk); -''' - -sql """ -alter table web_sales modify column ws_web_site_sk set stats ('row_count'='72001237', 'ndv'='24', 'min_value'='1', 'max_value'='24', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - 
-sql """ -alter table web_returns modify column wr_item_sk set stats ('row_count'='7197670', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table customer modify column c_birth_country set stats ('row_count'='2000000', 'ndv'='211', 'min_value'='', 'max_value'='ZIMBABWE', 'avg_size'='16787900', 'max_size'='16787900' ) -""" - -sql """ -alter table web_page modify column wp_rec_start_date set stats ('row_count'='2040', 'ndv'='4', 'min_value'='1997-09-03', 'max_value'='2001-09-03', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table store_returns modify column sr_store_credit set stats ('row_count'='28795080', 'ndv'='9907', 'min_value'='0.00', 'max_value'='15642.11', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table warehouse modify column w_county set stats ('row_count'='15', 'ndv'='8', 'min_value'='Barrow County', 'max_value'='Ziebach County', 'avg_size'='207', 'max_size'='207' ) -""" - -sql """ -alter table customer_demographics modify column cd_gender set stats ('row_count'='1920800', 'ndv'='2', 'min_value'='F', 'max_value'='M', 'avg_size'='1920800', 'max_size'='1920800' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_cdemo_sk set stats ('row_count'='7197670', 'ndv'='1868495', 'min_value'='1', 'max_value'='1920800', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_id set stats ('row_count'='15', 'ndv'='15', 'min_value'='AAAAAAAABAAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'avg_size'='240', 'max_size'='240' ) -""" - -sql """ -alter table item modify column i_size set stats ('row_count'='204000', 'ndv'='8', 'min_value'='', 'max_value'='small', 'avg_size'='880961', 'max_size'='880961' ) -""" - -sql """ -alter table web_sales modify column ws_sales_price set stats ('row_count'='72001237', 'ndv'='302', 'min_value'='0.00', 'max_value'='300.00', 'avg_size'='288004948', 
'max_size'='288004948' ) -""" - -sql """ -alter table date_dim modify column d_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'min_value'='1', 'max_value'='10436', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table store modify column s_country set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='United States', 'avg_size'='5174', 'max_size'='5174' ) -""" - -sql """ -alter table household_demographics modify column hd_income_band_sk set stats ('row_count'='7200', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='57600', 'max_size'='57600' ) -""" - -sql """ -alter table web_page modify column wp_creation_date_sk set stats ('row_count'='2040', 'ndv'='134', 'min_value'='2450672', 'max_value'='2450815', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table catalog_returns modify column cr_reason_sk set stats ('row_count'='14404374', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_site modify column web_city set stats ('row_count'='24', 'ndv'='11', 'min_value'='Centerville', 'max_value'='Salem', 'avg_size'='232', 'max_size'='232' ) -""" - -sql """ -alter table item modify column i_class_id set stats ('row_count'='204000', 'ndv'='16', 'min_value'='1', 'max_value'='16', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_hdemo_sk set stats ('row_count'='14404374', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_page modify column wp_customer_sk set stats ('row_count'='2040', 'ndv'='475', 'min_value'='711', 'max_value'='1996257', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table customer_demographics modify column cd_marital_status set stats ('row_count'='1920800', 'ndv'='5', 'min_value'='D', 'max_value'='W', 'avg_size'='1920800', 'max_size'='1920800' ) -""" 
- -sql """ -alter table call_center modify column cc_suite_number set stats ('row_count'='30', 'ndv'='14', 'min_value'='Suite 0', 'max_value'='Suite W', 'avg_size'='234', 'max_size'='234' ) -""" - -sql """ -alter table web_page modify column wp_url set stats ('row_count'='2040', 'ndv'='2', 'min_value'='', 'max_value'='http://www.foo.com', 'avg_size'='36270', 'max_size'='36270' ) -""" - -sql """ -alter table web_sales modify column ws_wholesale_cost set stats ('row_count'='72001237', 'ndv'='100', 'min_value'='1.00', 'max_value'='100.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_quantity set stats ('row_count'='14404374', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table catalog_sales modify column cs_wholesale_cost set stats ('row_count'='143997065', 'ndv'='100', 'min_value'='1.00', 'max_value'='100.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table store_sales modify column ss_quantity set stats ('row_count'='287997024', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table date_dim modify column d_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'min_value'='1', 'max_value'='801', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table date_dim modify column d_current_week set stats ('row_count'='73049', 'ndv'='1', 'min_value'='N', 'max_value'='N', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table web_returns modify column wr_reason_sk set stats ('row_count'='7197670', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table promotion modify column p_channel_catalog set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='986', 'max_size'='986' ) -""" - -sql """ -alter 
table catalog_sales modify column cs_net_paid_inc_ship_tax set stats ('row_count'='143997065', 'ndv'='38890', 'min_value'='0.00', 'max_value'='45460.80', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_sales modify column cs_order_number set stats ('row_count'='143997065', 'ndv'='16050730', 'min_value'='1', 'max_value'='16000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table time_dim modify column t_am_pm set stats ('row_count'='86400', 'ndv'='2', 'min_value'='AM', 'max_value'='PM', 'avg_size'='172800', 'max_size'='172800' ) -""" - -sql """ -alter table promotion modify column p_promo_name set stats ('row_count'='1000', 'ndv'='11', 'min_value'='', 'max_value'='pri', 'avg_size'='3924', 'max_size'='3924' ) -""" - -sql """ -alter table web_site modify column web_manager set stats ('row_count'='24', 'ndv'='19', 'min_value'='Adam Stonge', 'max_value'='Tommy Jones', 'avg_size'='297', 'max_size'='297' ) -""" - -sql """ -alter table store modify column s_gmt_offset set stats ('row_count'='402', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table web_sales modify column ws_quantity set stats ('row_count'='72001237', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table date_dim modify column d_weekend set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store modify column s_number_employees set stats ('row_count'='402', 'ndv'='97', 'min_value'='200', 'max_value'='300', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table call_center modify column cc_mkt_desc set stats ('row_count'='30', 'ndv'='22', 'min_value'='As existing eyebrows miss as the matters. 
Realistic stories may not face almost by a ', 'max_value'='Young tests could buy comfortable, local users o', 'avg_size'='1766', 'max_size'='1766' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid_inc_ship set stats ('row_count'='72001237', 'ndv'='36553', 'min_value'='0.00', 'max_value'='43468.92', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table item modify column i_item_sk set stats ('row_count'='204000', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='1632000', 'max_size'='1632000' ) -""" - -sql """ -alter table web_sales modify column ws_bill_addr_sk set stats ('row_count'='72001237', 'ndv'='998891', 'min_value'='1', 'max_value'='1000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table customer modify column c_salutation set stats ('row_count'='2000000', 'ndv'='7', 'min_value'='', 'max_value'='Sir', 'avg_size'='6257882', 'max_size'='6257882' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid set stats ('row_count'='72001237', 'ndv'='26912', 'min_value'='0.00', 'max_value'='29810.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table time_dim modify column t_time set stats ('row_count'='86400', 'ndv'='86684', 'min_value'='0', 'max_value'='86399', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table web_site modify column web_mkt_id set stats ('row_count'='24', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table store_returns modify column sr_hdemo_sk set stats ('row_count'='28795080', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_page_sk set stats ('row_count'='20400', 'ndv'='20554', 'min_value'='1', 'max_value'='20400', 'avg_size'='163200', 'max_size'='163200' ) -""" - -sql """ -alter table customer_address modify column 
ca_address_id set stats ('row_count'='1000000', 'ndv'='999950', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPOAAA', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table date_dim modify column d_year set stats ('row_count'='73049', 'ndv'='202', 'min_value'='1900', 'max_value'='2100', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_returns modify column wr_net_loss set stats ('row_count'='7197670', 'ndv'='11012', 'min_value'='0.50', 'max_value'='15068.96', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table store modify column s_closed_date_sk set stats ('row_count'='402', 'ndv'='69', 'min_value'='2450823', 'max_value'='2451313', 'avg_size'='3216', 'max_size'='3216' ) -""" - -sql """ -alter table customer_address modify column ca_city set stats ('row_count'='1000000', 'ndv'='977', 'min_value'='', 'max_value'='Zion', 'avg_size'='8681993', 'max_size'='8681993' ) -""" - -sql """ -alter table customer modify column c_customer_id set stats ('row_count'='2000000', 'ndv'='1994557', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPAAA', 'avg_size'='32000000', 'max_size'='32000000' ) -""" - -sql """ -alter table web_page modify column wp_access_date_sk set stats ('row_count'='2040', 'ndv'='101', 'min_value'='2452548', 'max_value'='2452648', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table warehouse modify column w_gmt_offset set stats ('row_count'='15', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='60', 'max_size'='60' ) -""" - -sql """ -alter table warehouse modify column w_street_number set stats ('row_count'='15', 'ndv'='15', 'min_value'='', 'max_value'='957', 'avg_size'='40', 'max_size'='40' ) -""" - -sql """ -alter table store_sales modify column ss_ticket_number set stats ('row_count'='287997024', 'ndv'='23905324', 'min_value'='1', 'max_value'='24000000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table 
catalog_returns modify column cr_fee set stats ('row_count'='14404374', 'ndv'='101', 'min_value'='0.50', 'max_value'='100.00', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table date_dim modify column d_current_quarter set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store modify column s_store_name set stats ('row_count'='402', 'ndv'='11', 'min_value'='', 'max_value'='pri', 'avg_size'='1575', 'max_size'='1575' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_wholesale_cost set stats ('row_count'='143997065', 'ndv'='10009', 'min_value'='1.00', 'max_value'='10000.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table reason modify column r_reason_desc set stats ('row_count'='55', 'ndv'='54', 'min_value'='Did not fit', 'max_value'='unauthoized purchase', 'avg_size'='758', 'max_size'='758' ) -""" - -sql """ -alter table date_dim modify column d_same_day_ly set stats ('row_count'='73049', 'ndv'='72450', 'min_value'='2414657', 'max_value'='2487705', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_site modify column web_gmt_offset set stats ('row_count'='24', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table time_dim modify column t_sub_shift set stats ('row_count'='86400', 'ndv'='4', 'min_value'='afternoon', 'max_value'='night', 'avg_size'='597600', 'max_size'='597600' ) -""" - -sql """ -alter table web_sales modify column ws_ship_customer_sk set stats ('row_count'='72001237', 'ndv'='1898561', 'min_value'='1', 'max_value'='2000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_site modify column web_close_date_sk set stats ('row_count'='24', 'ndv'='8', 'min_value'='2443328', 'max_value'='2447131', 'avg_size'='192', 'max_size'='192' ) -""" - -sql """ -alter table call_center 
modify column cc_market_manager set stats ('row_count'='30', 'ndv'='24', 'min_value'='Charles Corbett', 'max_value'='Tom Root', 'avg_size'='373', 'max_size'='373' ) -""" - -sql """ -alter table store modify column s_market_desc set stats ('row_count'='402', 'ndv'='311', 'min_value'='', 'max_value'='Years get acute years. Right likely players mus', 'avg_size'='23261', 'max_size'='23261' ) -""" - -sql """ -alter table call_center modify column cc_sq_ft set stats ('row_count'='30', 'ndv'='22', 'min_value'='1670015', 'max_value'='31896816', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table customer_address modify column ca_country set stats ('row_count'='1000000', 'ndv'='2', 'min_value'='', 'max_value'='United States', 'avg_size'='12608739', 'max_size'='12608739' ) -""" - -sql """ -alter table promotion modify column p_promo_id set stats ('row_count'='1000', 'ndv'='1004', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPCAAAAA', 'avg_size'='16000', 'max_size'='16000' ) -""" - -sql """ -alter table customer modify column c_preferred_cust_flag set stats ('row_count'='2000000', 'ndv'='3', 'min_value'='', 'max_value'='Y', 'avg_size'='1930222', 'max_size'='1930222' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_page_id set stats ('row_count'='20400', 'ndv'='20341', 'min_value'='AAAAAAAAAAABAAAA', 'max_value'='AAAAAAAAPPPDAAAA', 'avg_size'='326400', 'max_size'='326400' ) -""" - -sql """ -alter table household_demographics modify column hd_dep_count set stats ('row_count'='7200', 'ndv'='10', 'min_value'='0', 'max_value'='9', 'avg_size'='28800', 'max_size'='28800' ) -""" - -sql """ -alter table store_sales modify column ss_ext_wholesale_cost set stats ('row_count'='287997024', 'ndv'='10009', 'min_value'='1.00', 'max_value'='10000.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table promotion modify column p_end_date_sk set stats ('row_count'='1000', 'ndv'='571', 'min_value'='2450116', 
'max_value'='2450967', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table catalog_sales modify column cs_sold_date_sk set stats ('row_count'='143997065', 'ndv'='1835', 'min_value'='2450815', 'max_value'='2452654', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_returns modify column wr_return_quantity set stats ('row_count'='7197670', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table store_returns modify column sr_return_amt set stats ('row_count'='28795080', 'ndv'='15493', 'min_value'='0.00', 'max_value'='18973.20', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table web_site modify column web_rec_start_date set stats ('row_count'='24', 'ndv'='4', 'min_value'='1997-08-16', 'max_value'='2001-08-16', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table store_sales modify column ss_coupon_amt set stats ('row_count'='287997024', 'ndv'='16198', 'min_value'='0.00', 'max_value'='19225.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table call_center modify column cc_company set stats ('row_count'='30', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table warehouse modify column w_state set stats ('row_count'='15', 'ndv'='8', 'min_value'='AL', 'max_value'='SD', 'avg_size'='30', 'max_size'='30' ) -""" - -sql """ -alter table catalog_returns modify column cr_warehouse_sk set stats ('row_count'='14404374', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_customer_sk set stats ('row_count'='14404374', 'ndv'='1991754', 'min_value'='1', 'max_value'='2000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table customer_address modify column ca_state set stats ('row_count'='1000000', 
'ndv'='52', 'min_value'='', 'max_value'='WY', 'avg_size'='1939752', 'max_size'='1939752' ) -""" - -sql """ -alter table customer modify column c_customer_sk set stats ('row_count'='2000000', 'ndv'='1994393', 'min_value'='1', 'max_value'='2000000', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table store_sales modify column ss_item_sk set stats ('row_count'='287997024', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_customer_sk set stats ('row_count'='143997065', 'ndv'='1993190', 'min_value'='1', 'max_value'='2000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_cash set stats ('row_count'='7197670', 'ndv'='14621', 'min_value'='0.00', 'max_value'='26466.56', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table customer modify column c_birth_day set stats ('row_count'='2000000', 'ndv'='31', 'min_value'='1', 'max_value'='31', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table income_band modify column ib_income_band_sk set stats ('row_count'='20', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='160', 'max_size'='160' ) -""" - -sql """ -alter table web_returns modify column wr_fee set stats ('row_count'='7197670', 'ndv'='101', 'min_value'='0.50', 'max_value'='100.00', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table item modify column i_class set stats ('row_count'='204000', 'ndv'='100', 'min_value'='', 'max_value'='womens watch', 'avg_size'='1585937', 'max_size'='1585937' ) -""" - -sql """ -alter table customer modify column c_last_review_date_sk set stats ('row_count'='2000000', 'ndv'='366', 'min_value'='2452283', 'max_value'='2452648', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table web_site modify column web_rec_end_date set stats 
('row_count'='24', 'ndv'='3', 'min_value'='1999-08-16', 'max_value'='2001-08-15', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table catalog_returns modify column cr_reversed_charge set stats ('row_count'='14404374', 'ndv'='12359', 'min_value'='0.00', 'max_value'='23801.24', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table customer_address modify column ca_location_type set stats ('row_count'='1000000', 'ndv'='4', 'min_value'='', 'max_value'='single family', 'avg_size'='8728128', 'max_size'='8728128' ) -""" - -sql """ -alter table warehouse modify column w_street_type set stats ('row_count'='15', 'ndv'='11', 'min_value'='', 'max_value'='Wy', 'avg_size'='58', 'max_size'='58' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_hdemo_sk set stats ('row_count'='7197670', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table call_center modify column cc_manager set stats ('row_count'='30', 'ndv'='22', 'min_value'='Alden Snyder', 'max_value'='Wayne Ray', 'avg_size'='368', 'max_size'='368' ) -""" - -sql """ -alter table web_site modify column web_open_date_sk set stats ('row_count'='24', 'ndv'='12', 'min_value'='2450628', 'max_value'='2450807', 'avg_size'='192', 'max_size'='192' ) -""" - -sql """ -alter table dbgen_version modify column dv_version set stats ('row_count'='1', 'ndv'='1', 'min_value'='3.2.0', 'max_value'='3.2.0', 'avg_size'='5', 'max_size'='5' ) -""" - -sql """ -alter table catalog_sales modify column cs_sales_price set stats ('row_count'='143997065', 'ndv'='302', 'min_value'='0.00', 'max_value'='300.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_number set stats ('row_count'='20400', 'ndv'='109', 'min_value'='1', 'max_value'='109', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table promotion modify column p_channel_press set stats 
('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='985', 'max_size'='985' ) -""" - -sql """ -alter table web_sales modify column ws_ship_addr_sk set stats ('row_count'='72001237', 'ndv'='997336', 'min_value'='1', 'max_value'='1000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_cash set stats ('row_count'='14404374', 'ndv'='16271', 'min_value'='0.00', 'max_value'='24544.84', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table call_center modify column cc_mkt_class set stats ('row_count'='30', 'ndv'='25', 'min_value'='A bit narrow forms matter animals. Consist', 'max_value'='Yesterday new men can make moreov', 'avg_size'='1033', 'max_size'='1033' ) -""" - -sql """ -alter table catalog_returns modify column cr_returned_date_sk set stats ('row_count'='14404374', 'ndv'='2105', 'min_value'='2450821', 'max_value'='2452921', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_page modify column wp_max_ad_count set stats ('row_count'='2040', 'ndv'='5', 'min_value'='0', 'max_value'='4', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table call_center modify column cc_closed_date_sk set stats ('row_count'='30', 'ndv'='0', 'num_nulls'='42', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table web_returns modify column wr_return_ship_cost set stats ('row_count'='7197670', 'ndv'='10429', 'min_value'='0.00', 'max_value'='13602.60', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_name set stats ('row_count'='15', 'ndv'='15', 'min_value'='', 'max_value'='Rooms cook ', 'avg_size'='230', 'max_size'='230' ) -""" - -sql """ -alter table web_page modify column wp_type set stats ('row_count'='2040', 'ndv'='8', 'min_value'='', 'max_value'='welcome', 'avg_size'='12856', 'max_size'='12856' ) -""" - -sql """ -alter table store modify column 
s_division_name set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='2779', 'max_size'='2779' ) -""" - -sql """ -alter table date_dim modify column d_dom set stats ('row_count'='73049', 'ndv'='31', 'min_value'='1', 'max_value'='31', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table date_dim modify column d_fy_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'min_value'='1', 'max_value'='10436', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_returns modify column wr_return_tax set stats ('row_count'='7197670', 'ndv'='1820', 'min_value'='0.00', 'max_value'='2551.16', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_addr_sk set stats ('row_count'='143997065', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store modify column s_street_name set stats ('row_count'='402', 'ndv'='256', 'min_value'='', 'max_value'='Woodland ', 'avg_size'='3384', 'max_size'='3384' ) -""" - -sql """ -alter table store_sales modify column ss_hdemo_sk set stats ('row_count'='287997024', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table web_sales modify column ws_web_page_sk set stats ('row_count'='72001237', 'ndv'='2032', 'min_value'='1', 'max_value'='2040', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_sq_ft set stats ('row_count'='15', 'ndv'='14', 'min_value'='73065', 'max_value'='977787', 'avg_size'='60', 'max_size'='60' ) -""" - -sql """ -alter table ship_mode modify column sm_type set stats ('row_count'='20', 'ndv'='6', 'min_value'='EXPRESS', 'max_value'='TWO DAY', 'avg_size'='150', 'max_size'='150' ) -""" - -sql """ -alter table date_dim modify column d_fy_year set stats ('row_count'='73049', 
'ndv'='202', 'min_value'='1900', 'max_value'='2100', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table catalog_sales modify column cs_catalog_page_sk set stats ('row_count'='143997065', 'ndv'='11515', 'min_value'='1', 'max_value'='17108', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_sales modify column ws_warehouse_sk set stats ('row_count'='72001237', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table item modify column i_wholesale_cost set stats ('row_count'='204000', 'ndv'='89', 'min_value'='0.02', 'max_value'='88.91', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_returns modify column sr_return_tax set stats ('row_count'='28795080', 'ndv'='1427', 'min_value'='0.00', 'max_value'='1611.71', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table store_sales modify column ss_net_paid_inc_tax set stats ('row_count'='287997024', 'ndv'='20203', 'min_value'='0.00', 'max_value'='21344.38', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table web_site modify column web_mkt_desc set stats ('row_count'='24', 'ndv'='15', 'min_value'='Acres see else children. 
Mutual too', 'max_value'='Well similar decisions used to keep hardly democratic, personal priorities.', 'avg_size'='1561', 'max_size'='1561' ) -""" - -sql """ -alter table customer modify column c_current_cdemo_sk set stats ('row_count'='2000000', 'ndv'='1221921', 'min_value'='1', 'max_value'='1920798', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table web_returns modify column wr_returning_customer_sk set stats ('row_count'='7197670', 'ndv'='1926139', 'min_value'='1', 'max_value'='2000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store_sales modify column ss_ext_sales_price set stats ('row_count'='287997024', 'ndv'='19105', 'min_value'='0.00', 'max_value'='19878.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table catalog_sales modify column cs_item_sk set stats ('row_count'='143997065', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store modify column s_store_id set stats ('row_count'='402', 'ndv'='201', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPNAAAAAA', 'avg_size'='6432', 'max_size'='6432' ) -""" - -sql """ -alter table web_site modify column web_mkt_class set stats ('row_count'='24', 'ndv'='18', 'min_value'='About rural reasons shall no', 'max_value'='Wide, final representat', 'avg_size'='758', 'max_size'='758' ) -""" - -sql """ -alter table customer modify column c_birth_month set stats ('row_count'='2000000', 'ndv'='12', 'min_value'='1', 'max_value'='12', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table date_dim modify column d_last_dom set stats ('row_count'='73049', 'ndv'='2419', 'min_value'='2415020', 'max_value'='2488372', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_sales modify column ws_bill_customer_sk set stats ('row_count'='72001237', 'ndv'='1899439', 'min_value'='1', 'max_value'='2000000', 
'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_sales modify column ws_item_sk set stats ('row_count'='72001237', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table call_center modify column cc_state set stats ('row_count'='30', 'ndv'='8', 'min_value'='AL', 'max_value'='TN', 'avg_size'='60', 'max_size'='60' ) -""" - -sql """ -alter table promotion modify column p_start_date_sk set stats ('row_count'='1000', 'ndv'='574', 'min_value'='2450100', 'max_value'='2450915', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_date_sk set stats ('row_count'='143997065', 'ndv'='1933', 'min_value'='2450817', 'max_value'='2452744', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store_sales modify column ss_sales_price set stats ('row_count'='287997024', 'ndv'='202', 'min_value'='0.00', 'max_value'='200.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table promotion modify column p_channel_details set stats ('row_count'='1000', 'ndv'='992', 'min_value'='', 'max_value'='Young, valuable companies watch walls. 
Payments can flour', 'avg_size'='39304', 'max_size'='39304' ) -""" - -sql """ -alter table item modify column i_rec_end_date set stats ('row_count'='204000', 'ndv'='3', 'min_value'='1999-10-27', 'max_value'='2001-10-26', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table item modify column i_container set stats ('row_count'='204000', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='1424430', 'max_size'='1424430' ) -""" - -sql """ -alter table web_site modify column web_tax_percentage set stats ('row_count'='24', 'ndv'='1', 'min_value'='0.00', 'max_value'='0.12', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table customer modify column c_email_address set stats ('row_count'='2000000', 'ndv'='1936613', 'min_value'='', 'max_value'='Zulma.Wright@AqokXsju9f2yj.org', 'avg_size'='53014147', 'max_size'='53014147' ) -""" - -sql """ -alter table income_band modify column ib_lower_bound set stats ('row_count'='20', 'ndv'='20', 'min_value'='0', 'max_value'='190001', 'avg_size'='80', 'max_size'='80' ) -""" - -sql """ -alter table web_returns modify column wr_account_credit set stats ('row_count'='7197670', 'ndv'='10868', 'min_value'='0.00', 'max_value'='23028.27', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table web_sales modify column ws_bill_hdemo_sk set stats ('row_count'='72001237', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table store_sales modify column ss_store_sk set stats ('row_count'='287997024', 'ndv'='200', 'min_value'='1', 'max_value'='400', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table store_returns modify column sr_customer_sk set stats ('row_count'='28795080', 'ndv'='1994323', 'min_value'='1', 'max_value'='2000000', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table call_center modify column cc_class set stats ('row_count'='30', 'ndv'='3', 
'min_value'='large', 'max_value'='small', 'avg_size'='166', 'max_size'='166' ) -""" - -sql """ -alter table time_dim modify column t_meal_time set stats ('row_count'='86400', 'ndv'='4', 'min_value'='', 'max_value'='lunch', 'avg_size'='248400', 'max_size'='248400' ) -""" - -sql """ -alter table web_site modify column web_street_number set stats ('row_count'='24', 'ndv'='14', 'min_value'='184', 'max_value'='973', 'avg_size'='70', 'max_size'='70' ) -""" - -sql """ -alter table catalog_sales modify column cs_promo_sk set stats ('row_count'='143997065', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table customer modify column c_last_name set stats ('row_count'='2000000', 'ndv'='4990', 'min_value'='', 'max_value'='Zuniga', 'avg_size'='11833714', 'max_size'='11833714' ) -""" - -sql """ -alter table promotion modify column p_channel_event set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='986', 'max_size'='986' ) -""" - -sql """ -alter table store_returns modify column sr_return_amt_inc_tax set stats ('row_count'='28795080', 'ndv'='16190', 'min_value'='0.00', 'max_value'='20002.89', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table dbgen_version modify column dv_cmdline_args set stats ('row_count'='1', 'ndv'='1', 'min_value'='-SCALE 100 -PARALLEL 10 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/doris/tools/tpcds-tools/bin/tpcds-data ', 'max_value'='-SCALE 100 -PARALLEL 10 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/doris/tools/tpcds-tools/bin/tpcds-data ', 'avg_size'='105', 'max_size'='105' ) -""" - -sql """ -alter table warehouse modify column w_street_name set stats ('row_count'='15', 'ndv'='15', 'min_value'='', 'max_value'='Wilson Elm', 'avg_size'='128', 'max_size'='128' ) -""" - -sql """ -alter table call_center modify column cc_county set stats ('row_count'='30', 'ndv'='8', 'min_value'='Barrow County', 'max_value'='Ziebach County', 
'avg_size'='423', 'max_size'='423' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_addr_sk set stats ('row_count'='14404374', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_cdemo_sk set stats ('row_count'='14404374', 'ndv'='1913762', 'min_value'='1', 'max_value'='1920800', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_sales modify column ws_ship_hdemo_sk set stats ('row_count'='72001237', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table call_center modify column cc_mkt_id set stats ('row_count'='30', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table store modify column s_store_sk set stats ('row_count'='402', 'ndv'='398', 'min_value'='1', 'max_value'='402', 'avg_size'='3216', 'max_size'='3216' ) -""" - -sql """ -alter table customer_demographics modify column cd_dep_employed_count set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_list_price set stats ('row_count'='143997065', 'ndv'='29336', 'min_value'='1.00', 'max_value'='29997.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table web_sales modify column ws_bill_cdemo_sk set stats ('row_count'='72001237', 'ndv'='1835731', 'min_value'='1', 'max_value'='1920800', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_returns modify column wr_order_number set stats ('row_count'='7197670', 'ndv'='4249346', 'min_value'='1', 'max_value'='5999999', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table web_site modify column web_country set stats ('row_count'='24', 'ndv'='1', 
'min_value'='United States', 'max_value'='United States', 'avg_size'='312', 'max_size'='312' ) -""" - -sql """ -alter table web_sales modify column ws_net_profit set stats ('row_count'='72001237', 'ndv'='27958', 'min_value'='-9997.00', 'max_value'='19840.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table customer_demographics modify column cd_dep_college_count set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table store modify column s_company_name set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='2793', 'max_size'='2793' ) -""" - -sql """ -alter table web_site modify column web_zip set stats ('row_count'='24', 'ndv'='14', 'min_value'='28828', 'max_value'='78828', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table warehouse modify column w_city set stats ('row_count'='15', 'ndv'='11', 'min_value'='Bethel', 'max_value'='Union', 'avg_size'='111', 'max_size'='111' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_paid_inc_tax set stats ('row_count'='143997065', 'ndv'='28777', 'min_value'='0.00', 'max_value'='31745.52', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table store_returns modify column sr_return_quantity set stats ('row_count'='28795080', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table date_dim modify column d_date_id set stats ('row_count'='73049', 'ndv'='72907', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'avg_size'='1168784', 'max_size'='1168784' ) -""" - -sql """ -alter table store_sales modify column ss_net_profit set stats ('row_count'='287997024', 'ndv'='19581', 'min_value'='-10000.00', 'max_value'='9889.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table call_center modify column 
cc_tax_percentage set stats ('row_count'='30', 'ndv'='1', 'min_value'='0.00', 'max_value'='0.12', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table promotion modify column p_response_targe set stats ('row_count'='1000', 'ndv'='1', 'min_value'='1', 'max_value'='1', 'avg_size'='4000', 'max_size'='4000' ) -""" - -sql """ -alter table time_dim modify column t_second set stats ('row_count'='86400', 'ndv'='60', 'min_value'='0', 'max_value'='59', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table date_dim modify column d_first_dom set stats ('row_count'='73049', 'ndv'='2410', 'min_value'='2415021', 'max_value'='2488070', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_returns modify column wr_return_amt set stats ('row_count'='7197670', 'ndv'='19263', 'min_value'='0.00', 'max_value'='28346.31', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table web_site modify column web_site_sk set stats ('row_count'='24', 'ndv'='24', 'min_value'='1', 'max_value'='24', 'avg_size'='192', 'max_size'='192' ) -""" - -sql """ -alter table catalog_returns modify column cr_ship_mode_sk set stats ('row_count'='14404374', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table warehouse modify column w_suite_number set stats ('row_count'='15', 'ndv'='14', 'min_value'='', 'max_value'='Suite X', 'avg_size'='111', 'max_size'='111' ) -""" - -sql """ -alter table web_page modify column wp_web_page_sk set stats ('row_count'='2040', 'ndv'='2032', 'min_value'='1', 'max_value'='2040', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table item modify column i_brand_id set stats ('row_count'='204000', 'ndv'='951', 'min_value'='1001001', 'max_value'='10016017', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_sales modify column ss_customer_sk set stats ('row_count'='287997024', 'ndv'='1994393', 
'min_value'='1', 'max_value'='2000000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table time_dim modify column t_minute set stats ('row_count'='86400', 'ndv'='60', 'min_value'='0', 'max_value'='59', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table item modify column i_item_id set stats ('row_count'='204000', 'ndv'='103230', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPBAAA', 'avg_size'='3264000', 'max_size'='3264000' ) -""" - -sql """ -alter table date_dim modify column d_current_day set stats ('row_count'='73049', 'ndv'='1', 'min_value'='N', 'max_value'='N', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table item modify column i_manufact set stats ('row_count'='204000', 'ndv'='1004', 'min_value'='', 'max_value'='pripripri', 'avg_size'='2298787', 'max_size'='2298787' ) -""" - -sql """ -alter table store modify column s_division_id set stats ('row_count'='402', 'ndv'='1', 'min_value'='1', 'max_value'='1', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table dbgen_version modify column dv_create_date set stats ('row_count'='1', 'ndv'='1', 'min_value'='2023-03-16', 'max_value'='2023-03-16', 'avg_size'='4', 'max_size'='4' ) -""" - -sql """ -alter table web_site modify column web_name set stats ('row_count'='24', 'ndv'='4', 'min_value'='site_0', 'max_value'='site_3', 'avg_size'='144', 'max_size'='144' ) -""" - -sql """ -alter table customer_address modify column ca_suite_number set stats ('row_count'='1000000', 'ndv'='76', 'min_value'='', 'max_value'='Suite Y', 'avg_size'='7652799', 'max_size'='7652799' ) -""" - -sql """ -alter table customer modify column c_first_sales_date_sk set stats ('row_count'='2000000', 'ndv'='3644', 'min_value'='2448998', 'max_value'='2452648', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table web_sales modify column ws_order_number set stats ('row_count'='72001237', 'ndv'='6015811', 'min_value'='1', 
'max_value'='6000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table store modify column s_zip set stats ('row_count'='402', 'ndv'='102', 'min_value'='', 'max_value'='79431', 'avg_size'='1980', 'max_size'='1980' ) -""" - -sql """ -alter table promotion modify column p_item_sk set stats ('row_count'='1000', 'ndv'='970', 'min_value'='280', 'max_value'='203966', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table web_sales modify column ws_ship_cdemo_sk set stats ('row_count'='72001237', 'ndv'='1822804', 'min_value'='1', 'max_value'='1920800', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_site modify column web_street_name set stats ('row_count'='24', 'ndv'='24', 'min_value'='11th ', 'max_value'='Wilson Ridge', 'avg_size'='219', 'max_size'='219' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_hdemo_sk set stats ('row_count'='14404374', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table customer_demographics modify column cd_purchase_estimate set stats ('row_count'='1920800', 'ndv'='20', 'min_value'='500', 'max_value'='10000', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_customer_sk set stats ('row_count'='7197670', 'ndv'='1923644', 'min_value'='1', 'max_value'='2000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_mode_sk set stats ('row_count'='143997065', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table customer modify column c_birth_year set stats ('row_count'='2000000', 'ndv'='69', 'min_value'='1924', 'max_value'='1992', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_tax set stats ('row_count'='14404374', 
'ndv'='1926', 'min_value'='0.00', 'max_value'='2390.75', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table web_sales modify column ws_ext_sales_price set stats ('row_count'='72001237', 'ndv'='27115', 'min_value'='0.00', 'max_value'='29810.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_page_number set stats ('row_count'='20400', 'ndv'='189', 'min_value'='1', 'max_value'='188', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table date_dim modify column d_date_sk set stats ('row_count'='73049', 'ndv'='73042', 'min_value'='2415022', 'max_value'='2488070', 'avg_size'='584392', 'max_size'='584392' ) -""" - -sql """ -alter table date_dim modify column d_month_seq set stats ('row_count'='73049', 'ndv'='2398', 'min_value'='0', 'max_value'='2400', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table inventory modify column inv_item_sk set stats ('row_count'='399330000', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='3194640000', 'max_size'='3194640000' ) -""" - -sql """ -alter table call_center modify column cc_open_date_sk set stats ('row_count'='30', 'ndv'='15', 'min_value'='2450794', 'max_value'='2451146', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table store_sales modify column ss_addr_sk set stats ('row_count'='287997024', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table web_returns modify column wr_returning_addr_sk set stats ('row_count'='7197670', 'ndv'='999584', 'min_value'='1', 'max_value'='1000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store modify column s_market_id set stats ('row_count'='402', 'ndv'='10', 'min_value'='1', 'max_value'='10', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_cdemo_sk set stats 
('row_count'='143997065', 'ndv'='1915709', 'min_value'='1', 'max_value'='1920800', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table customer_address modify column ca_address_sk set stats ('row_count'='1000000', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table web_site modify column web_market_manager set stats ('row_count'='24', 'ndv'='21', 'min_value'='Albert Leung', 'max_value'='Zachery Oneil', 'avg_size'='294', 'max_size'='294' ) -""" - -sql """ -alter table item modify column i_rec_start_date set stats ('row_count'='204000', 'ndv'='4', 'min_value'='1997-10-27', 'max_value'='2001-10-27', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table web_sales modify column ws_ship_mode_sk set stats ('row_count'='72001237', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table call_center modify column cc_street_type set stats ('row_count'='30', 'ndv'='9', 'min_value'='Avenue', 'max_value'='Way', 'avg_size'='140', 'max_size'='140' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_paid_inc_ship set stats ('row_count'='143997065', 'ndv'='37890', 'min_value'='0.00', 'max_value'='43725.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table store_returns modify column sr_returned_date_sk set stats ('row_count'='28795080', 'ndv'='2010', 'min_value'='2450820', 'max_value'='2452822', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table item modify column i_category set stats ('row_count'='204000', 'ndv'='11', 'min_value'='', 'max_value'='Women', 'avg_size'='1201703', 'max_size'='1201703' ) -""" - -sql """ -alter table store modify column s_street_type set stats ('row_count'='402', 'ndv'='21', 'min_value'='', 'max_value'='Wy', 'avg_size'='1657', 'max_size'='1657' ) -""" - -sql """ -alter table web_sales modify 
column ws_ext_list_price set stats ('row_count'='72001237', 'ndv'='29104', 'min_value'='1.02', 'max_value'='29997.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table call_center modify column cc_city set stats ('row_count'='30', 'ndv'='12', 'min_value'='Bethel', 'max_value'='Shady Grove', 'avg_size'='282', 'max_size'='282' ) -""" - -sql """ -alter table household_demographics modify column hd_buy_potential set stats ('row_count'='7200', 'ndv'='6', 'min_value'='0-500', 'max_value'='Unknown', 'avg_size'='54000', 'max_size'='54000' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_cdemo_sk set stats ('row_count'='14404374', 'ndv'='1900770', 'min_value'='1', 'max_value'='1920800', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table item modify column i_manager_id set stats ('row_count'='204000', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table customer_address modify column ca_gmt_offset set stats ('row_count'='1000000', 'ndv'='6', 'min_value'='-10.00', 'max_value'='-5.00', 'avg_size'='4000000', 'max_size'='4000000' ) -""" - -sql """ -alter table store modify column s_state set stats ('row_count'='402', 'ndv'='10', 'min_value'='', 'max_value'='TN', 'avg_size'='800', 'max_size'='800' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_customer_sk set stats ('row_count'='14404374', 'ndv'='1977657', 'min_value'='1', 'max_value'='2000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table item modify column i_product_name set stats ('row_count'='204000', 'ndv'='200390', 'min_value'='', 'max_value'='pripripripripriought', 'avg_size'='4546148', 'max_size'='4546148' ) -""" - -sql """ -alter table store_returns modify column sr_addr_sk set stats ('row_count'='28795080', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='230360640', 'max_size'='230360640' ) -""" 
- -sql """ -alter table item modify column i_category_id set stats ('row_count'='204000', 'ndv'='10', 'min_value'='1', 'max_value'='10', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_returns modify column sr_return_ship_cost set stats ('row_count'='28795080', 'ndv'='8186', 'min_value'='0.00', 'max_value'='9578.25', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table catalog_sales modify column cs_sold_time_sk set stats ('row_count'='143997065', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table date_dim modify column d_day_name set stats ('row_count'='73049', 'ndv'='7', 'min_value'='Friday', 'max_value'='Wednesday', 'avg_size'='521779', 'max_size'='521779' ) -""" - -sql """ -alter table web_returns modify column wr_web_page_sk set stats ('row_count'='7197670', 'ndv'='2032', 'min_value'='1', 'max_value'='2040', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store modify column s_street_number set stats ('row_count'='402', 'ndv'='267', 'min_value'='', 'max_value'='986', 'avg_size'='1150', 'max_size'='1150' ) -""" - -sql """ -alter table web_sales modify column ws_sold_time_sk set stats ('row_count'='72001237', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table store_sales modify column ss_ext_tax set stats ('row_count'='287997024', 'ndv'='1722', 'min_value'='0.00', 'max_value'='1762.38', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table date_dim modify column d_dow set stats ('row_count'='73049', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table store_returns modify column sr_refunded_cash set stats ('row_count'='28795080', 'ndv'='12626', 'min_value'='0.00', 'max_value'='17556.95', 'avg_size'='115180320', 'max_size'='115180320' 
) -""" - -sql """ -alter table call_center modify column cc_call_center_sk set stats ('row_count'='30', 'ndv'='30', 'min_value'='1', 'max_value'='30', 'avg_size'='240', 'max_size'='240' ) -""" - -sql """ -alter table store_returns modify column sr_fee set stats ('row_count'='28795080', 'ndv'='101', 'min_value'='0.50', 'max_value'='100.00', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_ship_cost set stats ('row_count'='14404374', 'ndv'='11144', 'min_value'='0.00', 'max_value'='14130.96', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_addr_sk set stats ('row_count'='143997065', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table time_dim modify column t_time_id set stats ('row_count'='86400', 'ndv'='85663', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPAAAA', 'avg_size'='1382400', 'max_size'='1382400' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_paid set stats ('row_count'='143997065', 'ndv'='27448', 'min_value'='0.00', 'max_value'='29760.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_customer_sk set stats ('row_count'='143997065', 'ndv'='1993691', 'min_value'='1', 'max_value'='2000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_sales modify column ws_coupon_amt set stats ('row_count'='72001237', 'ndv'='20659', 'min_value'='0.00', 'max_value'='27591.16', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table promotion modify column p_promo_sk set stats ('row_count'='1000', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table web_page modify column wp_rec_end_date set stats ('row_count'='2040', 'ndv'='3', 
'min_value'='1999-09-03', 'max_value'='2001-09-02', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_addr_sk set stats ('row_count'='7197670', 'ndv'='999503', 'min_value'='1', 'max_value'='1000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table web_page modify column wp_char_count set stats ('row_count'='2040', 'ndv'='1363', 'min_value'='303', 'max_value'='8523', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table promotion modify column p_purpose set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='6909', 'max_size'='6909' ) -""" - -sql """ -alter table web_sales modify column ws_ship_date_sk set stats ('row_count'='72001237', 'ndv'='1952', 'min_value'='2450817', 'max_value'='2452762', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table date_dim modify column d_current_year set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store_sales modify column ss_net_paid set stats ('row_count'='287997024', 'ndv'='19028', 'min_value'='0.00', 'max_value'='19878.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table web_returns modify column wr_returned_date_sk set stats ('row_count'='7197670', 'ndv'='2185', 'min_value'='2450820', 'max_value'='2453002', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store_returns modify column sr_cdemo_sk set stats ('row_count'='28795080', 'ndv'='1916366', 'min_value'='1', 'max_value'='1920800', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_page modify column cp_description set stats ('row_count'='20400', 'ndv'='20501', 'min_value'='', 'max_value'='Youngsters should get very. Bad, necessary years must pick telecommunications. 
Co', 'avg_size'='1507423', 'max_size'='1507423' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_tax set stats ('row_count'='143997065', 'ndv'='2488', 'min_value'='0.00', 'max_value'='2619.36', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table date_dim modify column d_holiday set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_discount_amt set stats ('row_count'='143997065', 'ndv'='27722', 'min_value'='0.00', 'max_value'='29765.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table warehouse modify column w_zip set stats ('row_count'='15', 'ndv'='15', 'min_value'='28721', 'max_value'='78721', 'avg_size'='75', 'max_size'='75' ) -""" - -sql """ -alter table catalog_returns modify column cr_catalog_page_sk set stats ('row_count'='14404374', 'ndv'='11515', 'min_value'='1', 'max_value'='17108', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_returns modify column cr_order_number set stats ('row_count'='14404374', 'ndv'='9425725', 'min_value'='2', 'max_value'='16000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_cdemo_sk set stats ('row_count'='143997065', 'ndv'='1916125', 'min_value'='1', 'max_value'='1920800', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table catalog_returns modify column cr_returned_time_sk set stats ('row_count'='14404374', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_sales modify column ws_ext_wholesale_cost set stats ('row_count'='72001237', 'ndv'='10009', 'min_value'='1.00', 'max_value'='10000.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table web_page modify column wp_image_count set stats 
('row_count'='2040', 'ndv'='7', 'min_value'='1', 'max_value'='7', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table time_dim modify column t_shift set stats ('row_count'='86400', 'ndv'='3', 'min_value'='first', 'max_value'='third', 'avg_size'='460800', 'max_size'='460800' ) -""" - -sql """ -alter table store_sales modify column ss_ext_discount_amt set stats ('row_count'='287997024', 'ndv'='16198', 'min_value'='0.00', 'max_value'='19225.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_sk set stats ('row_count'='15', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table store_sales modify column ss_sold_time_sk set stats ('row_count'='287997024', 'ndv'='47252', 'min_value'='28800', 'max_value'='75599', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table customer_address modify column ca_street_name set stats ('row_count'='1000000', 'ndv'='8155', 'min_value'='', 'max_value'='Woodland Woodland', 'avg_size'='8445649', 'max_size'='8445649' ) -""" - -sql """ -alter table customer_address modify column ca_county set stats ('row_count'='1000000', 'ndv'='1825', 'min_value'='', 'max_value'='Ziebach County', 'avg_size'='13540273', 'max_size'='13540273' ) -""" - -sql """ -alter table ship_mode modify column sm_contract set stats ('row_count'='20', 'ndv'='20', 'min_value'='2mM8l', 'max_value'='yVfotg7Tio3MVhBg6Bkn', 'avg_size'='252', 'max_size'='252' ) -""" - -sql """ -alter table customer_address modify column ca_zip set stats ('row_count'='1000000', 'ndv'='7733', 'min_value'='', 'max_value'='99981', 'avg_size'='4848150', 'max_size'='4848150' ) -""" - -sql """ -alter table store modify column s_county set stats ('row_count'='402', 'ndv'='10', 'min_value'='', 'max_value'='Ziebach County', 'avg_size'='5693', 'max_size'='5693' ) -""" - -sql """ -alter table promotion modify column p_channel_tv set stats 
('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='986', 'max_size'='986' ) -""" - -sql """ -alter table time_dim modify column t_time_sk set stats ('row_count'='86400', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='691200', 'max_size'='691200' ) -""" - -sql """ -alter table date_dim modify column d_following_holiday set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store_returns modify column sr_return_time_sk set stats ('row_count'='28795080', 'ndv'='32660', 'min_value'='28799', 'max_value'='61199', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_ship_cost set stats ('row_count'='143997065', 'ndv'='14266', 'min_value'='0.00', 'max_value'='14896.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table item modify column i_brand set stats ('row_count'='204000', 'ndv'='714', 'min_value'='', 'max_value'='univunivamalg #9', 'avg_size'='3287671', 'max_size'='3287671' ) -""" - -sql """ -alter table customer modify column c_current_addr_sk set stats ('row_count'='2000000', 'ndv'='866672', 'min_value'='1', 'max_value'='1000000', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table store modify column s_floor_space set stats ('row_count'='402', 'ndv'='300', 'min_value'='5004767', 'max_value'='9997773', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table inventory modify column inv_warehouse_sk set stats ('row_count'='399330000', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='3194640000', 'max_size'='3194640000' ) -""" - -sql """ -alter table web_site modify column web_county set stats ('row_count'='24', 'ndv'='9', 'min_value'='Barrow County', 'max_value'='Ziebach County', 'avg_size'='331', 'max_size'='331' ) -""" - -sql """ -alter table call_center modify column cc_rec_start_date set stats 
('row_count'='30', 'ndv'='4', 'min_value'='1998-01-01', 'max_value'='2002-01-01', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table date_dim modify column d_quarter_name set stats ('row_count'='73049', 'ndv'='799', 'min_value'='1900Q1', 'max_value'='2100Q1', 'avg_size'='438294', 'max_size'='438294' ) -""" - -sql """ -alter table call_center modify column cc_company_name set stats ('row_count'='30', 'ndv'='6', 'min_value'='able', 'max_value'='pri', 'avg_size'='110', 'max_size'='110' ) -""" - -sql """ -alter table customer_demographics modify column cd_credit_rating set stats ('row_count'='1920800', 'ndv'='4', 'min_value'='Good', 'max_value'='Unknown', 'avg_size'='13445600', 'max_size'='13445600' ) -""" - -sql """ -alter table web_returns modify column wr_return_amt_inc_tax set stats ('row_count'='7197670', 'ndv'='19975', 'min_value'='0.00', 'max_value'='29493.38', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table web_site modify column web_company_id set stats ('row_count'='24', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table date_dim modify column d_qoy set stats ('row_count'='73049', 'ndv'='4', 'min_value'='1', 'max_value'='4', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table catalog_sales modify column cs_quantity set stats ('row_count'='143997065', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table web_sales modify column ws_ext_ship_cost set stats ('row_count'='72001237', 'ndv'='13977', 'min_value'='0.00', 'max_value'='14927.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table catalog_sales modify column cs_list_price set stats ('row_count'='143997065', 'ndv'='301', 'min_value'='1.00', 'max_value'='300.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table call_center modify column cc_zip set stats 
('row_count'='30', 'ndv'='14', 'min_value'='20059', 'max_value'='75281', 'avg_size'='150', 'max_size'='150' ) -""" - -sql """ -alter table call_center modify column cc_division_name set stats ('row_count'='30', 'ndv'='6', 'min_value'='able', 'max_value'='pri', 'avg_size'='123', 'max_size'='123' ) -""" - -sql """ -alter table store_sales modify column ss_cdemo_sk set stats ('row_count'='287997024', 'ndv'='1916366', 'min_value'='1', 'max_value'='1920800', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_sales_price set stats ('row_count'='143997065', 'ndv'='27598', 'min_value'='0.00', 'max_value'='29808.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_amt_inc_tax set stats ('row_count'='14404374', 'ndv'='21566', 'min_value'='0.00', 'max_value'='29353.87', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table income_band modify column ib_upper_bound set stats ('row_count'='20', 'ndv'='20', 'min_value'='10000', 'max_value'='200000', 'avg_size'='80', 'max_size'='80' ) -""" - -sql """ -alter table item modify column i_color set stats ('row_count'='204000', 'ndv'='93', 'min_value'='', 'max_value'='yellow', 'avg_size'='1094247', 'max_size'='1094247' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_hdemo_sk set stats ('row_count'='143997065', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store modify column s_tax_precentage set stats ('row_count'='402', 'ndv'='1', 'min_value'='0.00', 'max_value'='0.11', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table item modify column i_units set stats ('row_count'='204000', 'ndv'='22', 'min_value'='', 'max_value'='Unknown', 'avg_size'='852562', 'max_size'='852562' ) -""" - -sql """ -alter table reason modify column r_reason_id set stats ('row_count'='55', 
'ndv'='55', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPCAAAAAA', 'avg_size'='880', 'max_size'='880' ) -""" - -sql """ -alter table store_sales modify column ss_ext_list_price set stats ('row_count'='287997024', 'ndv'='19770', 'min_value'='1.00', 'max_value'='20000.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table promotion modify column p_cost set stats ('row_count'='1000', 'ndv'='1', 'min_value'='1000.00', 'max_value'='1000.00', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table web_site modify column web_state set stats ('row_count'='24', 'ndv'='9', 'min_value'='AL', 'max_value'='TN', 'avg_size'='48', 'max_size'='48' ) -""" - -sql """ -alter table call_center modify column cc_country set stats ('row_count'='30', 'ndv'='1', 'min_value'='United States', 'max_value'='United States', 'avg_size'='390', 'max_size'='390' ) -""" - -sql """ -alter table store modify column s_company_id set stats ('row_count'='402', 'ndv'='1', 'min_value'='1', 'max_value'='1', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table time_dim modify column t_hour set stats ('row_count'='86400', 'ndv'='24', 'min_value'='0', 'max_value'='23', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'min_value'='1', 'max_value'='801', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table ship_mode modify column sm_code set stats ('row_count'='20', 'ndv'='4', 'min_value'='AIR', 'max_value'='SURFACE', 'avg_size'='87', 'max_size'='87' ) -""" - -sql """ -alter table web_returns modify column wr_returning_hdemo_sk set stats ('row_count'='7197670', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table catalog_returns modify column cr_call_center_sk set stats ('row_count'='14404374', 'ndv'='30', 'min_value'='1', 'max_value'='30', 
'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table household_demographics modify column hd_demo_sk set stats ('row_count'='7200', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='57600', 'max_size'='57600' ) -""" - -sql """ -alter table catalog_returns modify column cr_net_loss set stats ('row_count'='14404374', 'ndv'='11753', 'min_value'='0.50', 'max_value'='15781.83', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table catalog_returns modify column cr_item_sk set stats ('row_count'='14404374', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table store_returns modify column sr_item_sk set stats ('row_count'='28795080', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table call_center modify column cc_street_number set stats ('row_count'='30', 'ndv'='15', 'min_value'='406', 'max_value'='984', 'avg_size'='88', 'max_size'='88' ) -""" - -sql """ -alter table promotion modify column p_channel_radio set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='987', 'max_size'='987' ) -""" - -sql """ -alter table call_center modify column cc_name set stats ('row_count'='30', 'ndv'='15', 'min_value'='California', 'max_value'='Pacific Northwest_1', 'avg_size'='401', 'max_size'='401' ) -""" - -sql """ -alter table call_center modify column cc_rec_end_date set stats ('row_count'='30', 'ndv'='3', 'min_value'='2000-01-01', 'max_value'='2001-12-31', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table customer_demographics modify column cd_dep_count set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table inventory modify column inv_date_sk set stats ('row_count'='399330000', 'ndv'='261', 'min_value'='2450815', 
'max_value'='2452635', 'avg_size'='3194640000', 'max_size'='3194640000' ) -""" - -sql """ -alter table customer_demographics modify column cd_demo_sk set stats ('row_count'='1920800', 'ndv'='1916366', 'min_value'='1', 'max_value'='1920800', 'avg_size'='15366400', 'max_size'='15366400' ) -""" - -sql """ -alter table ship_mode modify column sm_ship_mode_sk set stats ('row_count'='20', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='160', 'max_size'='160' ) -""" - -sql """ -alter table store_sales modify column ss_list_price set stats ('row_count'='287997024', 'ndv'='201', 'min_value'='1.00', 'max_value'='200.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table reason modify column r_reason_sk set stats ('row_count'='55', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='440', 'max_size'='440' ) -""" - -sql """ -alter table web_page modify column wp_autogen_flag set stats ('row_count'='2040', 'ndv'='3', 'min_value'='', 'max_value'='Y', 'avg_size'='2015', 'max_size'='2015' ) -""" - -sql """ -alter table web_sales modify column ws_sold_date_sk set stats ('row_count'='72001237', 'ndv'='1820', 'min_value'='2450816', 'max_value'='2452642', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_addr_sk set stats ('row_count'='14404374', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_site modify column web_street_type set stats ('row_count'='24', 'ndv'='15', 'min_value'='Avenue', 'max_value'='Wy', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table store modify column s_rec_end_date set stats ('row_count'='402', 'ndv'='3', 'min_value'='1999-03-13', 'max_value'='2001-03-12', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table item modify column i_formulation set stats ('row_count'='204000', 'ndv'='152702', 'min_value'='', 
'max_value'='yellow98911509228741', 'avg_size'='4069400', 'max_size'='4069400' ) -""" - -sql """ -alter table customer_demographics modify column cd_education_status set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='2 yr Degree', 'max_value'='Unknown', 'avg_size'='18384800', 'max_size'='18384800' ) -""" - -sql """ -alter table web_page modify column wp_link_count set stats ('row_count'='2040', 'ndv'='24', 'min_value'='2', 'max_value'='25', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table warehouse modify column w_country set stats ('row_count'='15', 'ndv'='1', 'min_value'='United States', 'max_value'='United States', 'avg_size'='195', 'max_size'='195' ) -""" - -sql """ -alter table catalog_returns modify column cr_store_credit set stats ('row_count'='14404374', 'ndv'='12156', 'min_value'='0.00', 'max_value'='22167.49', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table store modify column s_rec_start_date set stats ('row_count'='402', 'ndv'='4', 'min_value'='1997-03-13', 'max_value'='2001-03-13', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table web_site modify column web_site_id set stats ('row_count'='24', 'ndv'='12', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAOAAAAAAA', 'avg_size'='384', 'max_size'='384' ) -""" - -sql """ -alter table call_center modify column cc_gmt_offset set stats ('row_count'='30', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table ship_mode modify column sm_ship_mode_id set stats ('row_count'='20', 'ndv'='20', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'avg_size'='320', 'max_size'='320' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_amount set stats ('row_count'='14404374', 'ndv'='20656', 'min_value'='0.00', 'max_value'='28778.31', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table store modify column s_hours set stats 
('row_count'='402', 'ndv'='4', 'min_value'='', 'max_value'='8AM-8AM', 'avg_size'='2848', 'max_size'='2848' ) -""" - -sql """ -alter table web_returns modify column wr_returning_cdemo_sk set stats ('row_count'='7197670', 'ndv'='1865149', 'min_value'='1', 'max_value'='1920800', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table catalog_sales modify column cs_warehouse_sk set stats ('row_count'='143997065', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table date_dim modify column d_date set stats ('row_count'='73049', 'ndv'='73250', 'min_value'='1900-01-02', 'max_value'='2100-01-01', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table customer modify column c_first_name set stats ('row_count'='2000000', 'ndv'='5140', 'min_value'='', 'max_value'='Zulma', 'avg_size'='11267996', 'max_size'='11267996' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_profit set stats ('row_count'='143997065', 'ndv'='28450', 'min_value'='-10000.00', 'max_value'='19840.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table web_site modify column web_suite_number set stats ('row_count'='24', 'ndv'='20', 'min_value'='Suite 130', 'max_value'='Suite U', 'avg_size'='196', 'max_size'='196' ) -""" - -sql """ -alter table web_sales modify column ws_list_price set stats ('row_count'='72001237', 'ndv'='301', 'min_value'='1.00', 'max_value'='300.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table web_returns modify column wr_returned_time_sk set stats ('row_count'='7197670', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid_inc_tax set stats ('row_count'='72001237', 'ndv'='28263', 'min_value'='0.00', 'max_value'='32492.90', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter 
table store_returns modify column sr_net_loss set stats ('row_count'='28795080', 'ndv'='8663', 'min_value'='0.50', 'max_value'='10447.72', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table date_dim modify column d_same_day_lq set stats ('row_count'='73049', 'ndv'='72231', 'min_value'='2414930', 'max_value'='2487978', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table store modify column s_suite_number set stats ('row_count'='402', 'ndv'='75', 'min_value'='', 'max_value'='Suite Y', 'avg_size'='3140', 'max_size'='3140' ) -""" - -sql """ -alter table catalog_page modify column cp_start_date_sk set stats ('row_count'='20400', 'ndv'='91', 'min_value'='2450815', 'max_value'='2453005', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table customer_address modify column ca_street_number set stats ('row_count'='1000000', 'ndv'='1002', 'min_value'='', 'max_value'='999', 'avg_size'='2805540', 'max_size'='2805540' ) -""" - -sql """ -alter table item modify column i_current_price set stats ('row_count'='204000', 'ndv'='100', 'min_value'='0.09', 'max_value'='99.99', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_returns modify column sr_ticket_number set stats ('row_count'='28795080', 'ndv'='16790866', 'min_value'='1', 'max_value'='23999996', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_sales modify column cs_coupon_amt set stats ('row_count'='143997065', 'ndv'='22020', 'min_value'='0.00', 'max_value'='28422.94', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table date_dim modify column d_current_month set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid_inc_ship_tax set stats ('row_count'='72001237', 'ndv'='37541', 'min_value'='0.00', 'max_value'='44479.52', 'avg_size'='288004948', 
'max_size'='288004948' ) -""" - -sql """ -alter table web_sales modify column ws_promo_sk set stats ('row_count'='72001237', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table customer modify column c_first_shipto_date_sk set stats ('row_count'='2000000', 'ndv'='3644', 'min_value'='2449028', 'max_value'='2452678', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table catalog_page modify column cp_end_date_sk set stats ('row_count'='20400', 'ndv'='97', 'min_value'='2450844', 'max_value'='2453186', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table store_sales modify column ss_promo_sk set stats ('row_count'='287997024', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table catalog_page modify column cp_type set stats ('row_count'='20400', 'ndv'='4', 'min_value'='', 'max_value'='quarterly', 'avg_size'='155039', 'max_size'='155039' ) -""" - -sql """ -alter table promotion modify column p_channel_demo set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='984', 'max_size'='984' ) -""" - -sql """ -alter table store modify column s_market_manager set stats ('row_count'='402', 'ndv'='286', 'min_value'='', 'max_value'='Zane Perez', 'avg_size'='5129', 'max_size'='5129' ) -""" - -sql """ -alter table item modify column i_item_desc set stats ('row_count'='204000', 'ndv'='148398', 'min_value'='', 'max_value'='Youngsters used to save quite colour', 'avg_size'='20471814', 'max_size'='20471814' ) -""" - -sql """ -alter table call_center modify column cc_division set stats ('row_count'='30', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table web_site modify column web_class set stats ('row_count'='24', 'ndv'='1', 'min_value'='Unknown', 'max_value'='Unknown', 'avg_size'='168', 'max_size'='168' ) -""" 
- -sql """ -alter table store modify column s_geography_class set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='2793', 'max_size'='2793' ) -""" - -sql """ -alter table store_returns modify column sr_store_sk set stats ('row_count'='28795080', 'ndv'='200', 'min_value'='1', 'max_value'='400', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table call_center modify column cc_street_name set stats ('row_count'='30', 'ndv'='15', 'min_value'='1st ', 'max_value'='View ', 'avg_size'='240', 'max_size'='240' ) -""" - -sql """ -alter table date_dim modify column d_moy set stats ('row_count'='73049', 'ndv'='12', 'min_value'='1', 'max_value'='12', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table customer modify column c_current_hdemo_sk set stats ('row_count'='2000000', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table customer modify column c_login set stats ('row_count'='2000000', 'ndv'='1', 'min_value'='', 'max_value'='', 'avg_size'='0', 'max_size'='0' ) -""" - -sql """ -alter table web_sales modify column ws_ext_discount_amt set stats ('row_count'='72001237', 'ndv'='27052', 'min_value'='0.00', 'max_value'='29982.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table call_center modify column cc_call_center_id set stats ('row_count'='30', 'ndv'='15', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAOAAAAAAA', 'avg_size'='480', 'max_size'='480' ) -""" - -sql """ -alter table web_returns modify column wr_reversed_charge set stats ('row_count'='7197670', 'ndv'='10979', 'min_value'='0.00', 'max_value'='22972.36', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table store modify column s_city set stats ('row_count'='402', 'ndv'='19', 'min_value'='', 'max_value'='Union', 'avg_size'='3669', 'max_size'='3669' ) -""" - -sql """ -alter table promotion modify column 
p_channel_email set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='987', 'max_size'='987' ) -""" - -sql """ -alter table catalog_page modify column cp_department set stats ('row_count'='20400', 'ndv'='2', 'min_value'='', 'max_value'='DEPARTMENT', 'avg_size'='201950', 'max_size'='201950' ) -""" - -sql """ -alter table call_center modify column cc_hours set stats ('row_count'='30', 'ndv'='3', 'min_value'='8AM-12AM', 'max_value'='8AM-8AM', 'avg_size'='214', 'max_size'='214' ) -""" - -sql """ -alter table promotion modify column p_channel_dmail set stats ('row_count'='1000', 'ndv'='3', 'min_value'='', 'max_value'='Y', 'avg_size'='987', 'max_size'='987' ) -""" - -sql """ -alter table store modify column s_manager set stats ('row_count'='402', 'ndv'='301', 'min_value'='', 'max_value'='Zachary Price', 'avg_size'='5075', 'max_size'='5075' ) -""" - -sql """ -alter table store_returns modify column sr_reversed_charge set stats ('row_count'='28795080', 'ndv'='9872', 'min_value'='0.00', 'max_value'='16099.52', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table catalog_sales modify column cs_call_center_sk set stats ('row_count'='143997065', 'ndv'='30', 'min_value'='1', 'max_value'='30', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table household_demographics modify column hd_vehicle_count set stats ('row_count'='7200', 'ndv'='6', 'min_value'='-1', 'max_value'='4', 'avg_size'='28800', 'max_size'='28800' ) -""" - -sql """ -alter table web_site modify column web_company_name set stats ('row_count'='24', 'ndv'='6', 'min_value'='able', 'max_value'='pri', 'avg_size'='97', 'max_size'='97' ) -""" - -sql """ -alter table web_page modify column wp_web_page_id set stats ('row_count'='2040', 'ndv'='1019', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPEAAAAA', 'avg_size'='32640', 'max_size'='32640' ) -""" - -sql """ -alter table store_sales modify column ss_sold_date_sk set stats 
('row_count'='287997024', 'ndv'='1820', 'min_value'='2450816', 'max_value'='2452642', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table customer_address modify column ca_street_type set stats ('row_count'='1000000', 'ndv'='21', 'min_value'='', 'max_value'='Wy', 'avg_size'='4073296', 'max_size'='4073296' ) -""" - -sql """ -alter table web_sales modify column ws_ext_tax set stats ('row_count'='72001237', 'ndv'='2466', 'min_value'='0.00', 'max_value'='2682.90', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table item modify column i_manufact_id set stats ('row_count'='204000', 'ndv'='1005', 'min_value'='1', 'max_value'='1000', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table inventory modify column inv_quantity_on_hand set stats ('row_count'='399330000', 'ndv'='1006', 'min_value'='0', 'max_value'='1000', 'avg_size'='1597320000', 'max_size'='1597320000' ) -""" - -sql """ -alter table call_center modify column cc_employees set stats ('row_count'='30', 'ndv'='22', 'min_value'='2935', 'max_value'='69020', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table ship_mode modify column sm_carrier set stats ('row_count'='20', 'ndv'='20', 'min_value'='AIRBORNE', 'max_value'='ZOUROS', 'avg_size'='133', 'max_size'='133' ) -""" - -sql """ -alter table store_returns modify column sr_reason_sk set stats ('row_count'='28795080', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table promotion modify column p_discount_active set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='981', 'max_size'='981' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_hdemo_sk set stats ('row_count'='143997065', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store_sales modify column ss_wholesale_cost set stats 
('row_count'='287997024', 'ndv'='100', 'min_value'='1.00', 'max_value'='100.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim 
modify column d_current_month set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" 
- -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 
'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='Y', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 
'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2003-01-01', 'max_value'='2003-12-31', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1999-01-01', 'max_value'='1999-12-31', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35793', 'ndv'='35630', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='1997-12-31', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2002-01-01', 'max_value'='2002-12-31', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35065', 'ndv'='35118', 'num_nulls'='0', 'min_value'='2004-01-01', 'max_value'='2100-01-01', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2001-01-01', 'max_value'='2001-12-31', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='1998-12-31', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='366', 'ndv'='366', 'num_nulls'='0', 'min_value'='2000-01-01', 'max_value'='2000-12-31', 'data_size'='1464') partition (p2000);""" - -// 
sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35793', 'ndv'='35428', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='572688') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='368', 'num_nulls'='0', 'min_value'='AAAAAAAAAAGGFCAA', 'max_value'='AAAAAAAAPPFGFCAA', 'data_size'='5840') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAHGFCAA', 'max_value'='AAAAAAAAPPHGFCAA', 'data_size'='5840') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='AAAAAAAAAANGFCAA', 'max_value'='AAAAAAAAPPNGFCAA', 'data_size'='5840') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='AAAAAAAAAAKGFCAA', 'max_value'='AAAAAAAAPPKGFCAA', 'data_size'='5840') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35065', 'ndv'='35542', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAHFCAA', 'max_value'='AAAAAAAAPPPOFCAA', 'data_size'='561040') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAMGFCAA', 'max_value'='AAAAAAAAPPLGFCAA', 'data_size'='5840') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='366', 'ndv'='362', 'num_nulls'='0', 'min_value'='AAAAAAAAAAJGFCAA', 'max_value'='AAAAAAAAPPIGFCAA', 'data_size'='5856') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2450815', 
'max_value'='2451179', 'data_size'='2920') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 'data_size'='2920') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='2920') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='366', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='2928') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35065', 'ndv'='35067', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='280520') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='2920') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35793', 'ndv'='36266', 'num_nulls'='0', 'min_value'='2415022', 'max_value'='2450814', 'data_size'='286344') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2453005', 'data_size'='2920') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2614') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='250466') partition (pfuture);""" - -// sql """ 
-// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2608') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='255663') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2607') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2609') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35793', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35065', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 
'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='366', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1999);""" - -// sql 
""" -// alter table date_dim modify column d_dow set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452245', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451514', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35793', 'ndv'='1181', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2450784', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451880', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2452975', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452610', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35065', 'ndv'='1161', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 
'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451149', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter 
table date_dim modify column d_fy_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35793', 'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 
'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column 
d_fy_year set stats ('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim 
modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452275', 'max_value'='2452943', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451910', 'max_value'='2452578', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='35793', 'ndv'='1186', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2451117', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452640', 'max_value'='2453308', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451179', 'max_value'='2451847', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='35065', 'ndv'='1144', 'num_nulls'='0', 'min_value'='2453005', 'max_value'='2488372', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451544', 'max_value'='2452214', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2450814', 
'max_value'='2451482', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1188', 'max_value'='1199', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35793', 'ndv'='1176', 'num_nulls'='0', 'min_value'='0', 'max_value'='1175', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1176', 'max_value'='1187', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1236', 'max_value'='1247', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35065', 'ndv'='1147', 'num_nulls'='0', 'min_value'='1248', 'max_value'='2400', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1212', 'max_value'='1223', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1200', 'max_value'='1211', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1224', 'max_value'='1235', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35793', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_moy set stats 
('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35065', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 
'min_value'='1', 'max_value'='4', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='35793', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='35065', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2001Q1', 'max_value'='2001Q4', 'data_size'='2190') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35793', 'ndv'='393', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='1997Q4', 'data_size'='214758') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2002Q1', 'max_value'='2002Q4', 'data_size'='2190') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 'min_value'='2000Q1', 'max_value'='2000Q4', 'data_size'='2196') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='2004Q1', 'max_value'='2100Q1', 'data_size'='210390') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_quarter_name 
set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2003Q1', 'max_value'='2003Q4', 'data_size'='2190') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998Q1', 'max_value'='1998Q4', 'data_size'='2190') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1999Q1', 'max_value'='1999Q4', 'data_size'='2190') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35793', 'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition 
(p2003);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35793', 'ndv'='35806', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2450722', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451088', 'max_value'='2451452', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451819', 'max_value'='2452183', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='361', 'num_nulls'='0', 'min_value'='2450723', 'max_value'='2451087', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452184', 'max_value'='2452548', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2452549', 'max_value'='2452913', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35065', 'ndv'='34991', 'num_nulls'='0', 'min_value'='2452914', 'max_value'='2487978', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='366', 'ndv'='365', 'num_nulls'='0', 'min_value'='2451453', 'max_value'='2451818', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim 
modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35793', 'ndv'='35878', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2450449', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='366', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2450450', 'max_value'='2450814', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35065', 'ndv'='35076', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2487705', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451179', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 
'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column 
d_weekend set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column 
d_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ 
- -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 
'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter 
table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='Y', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 
'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2003-01-01', 'max_value'='2003-12-31', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1999-01-01', 'max_value'='1999-12-31', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35793', 'ndv'='35630', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='1997-12-31', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2002-01-01', 'max_value'='2002-12-31', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35065', 'ndv'='35118', 'num_nulls'='0', 'min_value'='2004-01-01', 'max_value'='2100-01-01', 'data_size'='140260') partition (pfuture); -// """ - -// sql 
""" -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2001-01-01', 'max_value'='2001-12-31', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='1998-12-31', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='366', 'ndv'='366', 'num_nulls'='0', 'min_value'='2000-01-01', 'max_value'='2000-12-31', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35793', 'ndv'='35428', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='572688') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='368', 'num_nulls'='0', 'min_value'='AAAAAAAAAAGGFCAA', 'max_value'='AAAAAAAAPPFGFCAA', 'data_size'='5840') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAHGFCAA', 'max_value'='AAAAAAAAPPHGFCAA', 'data_size'='5840') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='AAAAAAAAAANGFCAA', 'max_value'='AAAAAAAAPPNGFCAA', 'data_size'='5840') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='AAAAAAAAAAKGFCAA', 'max_value'='AAAAAAAAPPKGFCAA', 'data_size'='5840') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35065', 'ndv'='35542', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAHFCAA', 
'max_value'='AAAAAAAAPPPOFCAA', 'data_size'='561040') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAMGFCAA', 'max_value'='AAAAAAAAPPLGFCAA', 'data_size'='5840') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='366', 'ndv'='362', 'num_nulls'='0', 'min_value'='AAAAAAAAAAJGFCAA', 'max_value'='AAAAAAAAPPIGFCAA', 'data_size'='5856') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451179', 'data_size'='2920') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 'data_size'='2920') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='2920') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='366', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='2928') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35065', 'ndv'='35067', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='280520') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='2920') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35793', 'ndv'='36266', 'num_nulls'='0', 
'min_value'='2415022', 'max_value'='2450814', 'data_size'='286344') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2453005', 'data_size'='2920') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2614') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='250466') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2608') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='255663') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2607') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 
'max_value'='Wednesday', 'data_size'='2609') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35793', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35065', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='366', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 
'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452245', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451514', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35793', 'ndv'='1181', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2450784', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column 
d_first_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451880', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2452975', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452610', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35065', 'ndv'='1161', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451149', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 
'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35793', 'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') 
partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table 
date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 
'max_value'='Y', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452275', 'max_value'='2452943', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451910', 'max_value'='2452578', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats 
('row_count'='35793', 'ndv'='1186', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2451117', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452640', 'max_value'='2453308', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451179', 'max_value'='2451847', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='35065', 'ndv'='1144', 'num_nulls'='0', 'min_value'='2453005', 'max_value'='2488372', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451544', 'max_value'='2452214', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2450814', 'max_value'='2451482', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1188', 'max_value'='1199', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35793', 'ndv'='1176', 'num_nulls'='0', 'min_value'='0', 'max_value'='1175', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1176', 'max_value'='1187', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 
'min_value'='1236', 'max_value'='1247', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35065', 'ndv'='1147', 'num_nulls'='0', 'min_value'='1248', 'max_value'='2400', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1212', 'max_value'='1223', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1200', 'max_value'='1211', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1224', 'max_value'='1235', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35793', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35065', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify 
column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='35793', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column 
d_qoy set stats ('row_count'='35065', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2001Q1', 'max_value'='2001Q4', 'data_size'='2190') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35793', 'ndv'='393', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='1997Q4', 'data_size'='214758') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2002Q1', 'max_value'='2002Q4', 'data_size'='2190') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 'min_value'='2000Q1', 'max_value'='2000Q4', 'data_size'='2196') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='2004Q1', 'max_value'='2100Q1', 'data_size'='210390') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2003Q1', 'max_value'='2003Q4', 'data_size'='2190') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998Q1', 'max_value'='1998Q4', 'data_size'='2190') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1999Q1', 'max_value'='1999Q4', 'data_size'='2190') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35793', 
'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35793', 'ndv'='35806', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2450722', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451088', 'max_value'='2451452', 
'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451819', 'max_value'='2452183', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='361', 'num_nulls'='0', 'min_value'='2450723', 'max_value'='2451087', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452184', 'max_value'='2452548', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2452549', 'max_value'='2452913', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35065', 'ndv'='34991', 'num_nulls'='0', 'min_value'='2452914', 'max_value'='2487978', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='366', 'ndv'='365', 'num_nulls'='0', 'min_value'='2451453', 'max_value'='2451818', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35793', 'ndv'='35878', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2450449', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='366', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 
'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2450450', 'max_value'='2450814', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35065', 'ndv'='35076', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2487705', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451179', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003); -// """ - 
-// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 
'max_value'='Y', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats 
('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture); -// """ - - - -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy deleted file mode 100644 index ac9bfd0d8cb6b4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - qt_ds_shape_1 ''' - explain shape plan -with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy deleted file mode 100644 index 903d035d22e918..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_10 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where 
c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy deleted file mode 100644 index e3712ab6cee089..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_11 ''' - explain shape plan - - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = 
ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy deleted file mode 100644 index 04255c01f57aa1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_12 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - 
,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy deleted file mode 100644 index f1d8338c174e11..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_13 ''' - explain shape plan - - -select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 
150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy deleted file mode 100644 index d9f561d7fd313f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_14 ''' - explain shape plan - - -with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from 
catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy deleted file mode 100644 index 7e5f7c5ff17bd2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_15 ''' - explain shape plan - - - -select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy deleted file mode 100644 index 81886a513f9a0e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_16 ''' - explain shape plan - - - - -select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from 
catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy deleted file mode 100644 index 7f53dfaa3a0c16..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_17 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name 
in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy deleted file mode 100644 index fec13a57cf6313..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_18 ''' - explain shape plan - - - - -select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy deleted file mode 100644 index beb66b47ff57f5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_19 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy deleted file mode 100644 index 2fb61ba9fbd3da..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_2 ''' - explain shape plan - - - - -with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) 
wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy deleted file mode 100644 index aead630449670f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_20 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - 
,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy deleted file mode 100644 index 8d7a39c97354b0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_21 ''' - explain shape plan - - - - -select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy deleted file mode 100644 index c31efd8b73852d..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_22 ''' - explain shape plan - - - - -select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - 
,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy deleted file mode 100644 index 91494ee79761b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_23 ''' - explain shape plan - - - -with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and 
cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy deleted file mode 100644 index bfbd00add04fb4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_24 ''' - explain shape plan - - - - with ssales as - (select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid - from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address - where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip - and s_market_id=8 - group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) - select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid - from ssales - where i_color = 'beige' - group by c_last_name - ,c_first_name - ,s_store_name - having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) - order by c_last_name - ,c_first_name - ,s_store_name - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy 
deleted file mode 100644 index 0bb08f70c81758..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_25 ''' - explain shape plan - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - 
store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy deleted file mode 100644 index 01f80b4596d062..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_26 ''' - explain shape plan - - - - -select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy deleted file mode 100644 index 407f2ab9ea9af7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_27 ''' - explain shape plan - - - -select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100; - - 
''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy deleted file mode 100644 index aacdcc03a4a946..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_28 ''' - explain shape plan - - - - -select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or 
ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy deleted file mode 100644 index 98f4c2ffd57a4d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_29 ''' - explain shape plan - - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy deleted file mode 100644 index 
d91dc9bdbc63f3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_3 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk 
- and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy deleted file mode 100644 index f3c4f793cb206e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_30 ''' - explain shape plan - - - - -with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy deleted file mode 100644 index 35a6d672ddc2dd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_31 ''' - explain shape plan - - - - -with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and 
ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy deleted file mode 100644 index 0e712c8804d29c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_32 ''' - explain shape plan - - - - -select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy deleted file mode 100644 index 61fe0c580384d0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_33 ''' - explain shape plan - - - - -with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - 
item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy deleted file mode 100644 index 7b0a9cb22fffbc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_34 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where 
ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy deleted file mode 100644 index 715fb701b86a60..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_35 ''' - explain shape plan - - - - -select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by 
ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy deleted file mode 100644 index b67ecc699fec84..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_36 ''' - explain shape plan - - - - -select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy deleted file mode 100644 index ee3b6b8ebf2224..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_37 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by 
i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy deleted file mode 100644 index 2e1a2d1a5a12fa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_38 ''' - explain shape plan - - - - -select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy deleted file mode 100644 index f041f52ac11295..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy +++ /dev/null 
@@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_39 ''' - explain shape plan - - - - -with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and 
inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy deleted file mode 100644 index 1af702e366764a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_4 ''' - explain shape plan - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and 
cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when 
t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy deleted file mode 100644 index b943f2471b568b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_40 ''' - explain shape plan - - - -select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy deleted file mode 100644 index c14fdbb8a94abb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy +++ /dev/null @@ -1,98 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_41 ''' - explain shape plan - - - - -select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 
'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy deleted file mode 100644 index e533209b57668f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_42 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy deleted file mode 100644 index cc53ae91ec0368..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_43 ''' - explain shape plan - - - - -select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy deleted file mode 100644 index cc655c04360072..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_44 ''' - explain shape plan - - - - -select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where 
ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy deleted file mode 100644 index f60e79ad7c85ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_45 ''' - explain shape plan - - - - -select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy deleted file mode 100644 index 8a0afcea593227..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_46 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - 
household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy deleted file mode 100644 index 4845dfd8a32c6f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_47 ''' - explain shape plan - - -with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - 
v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy deleted file mode 100644 index 9f6f638c5fd373..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_48 ''' - explain shape plan - - - - -select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy deleted file mode 100644 index b13177064ac384..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_49 ''' - explain shape plan - - - - -select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - 
,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - 
) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy deleted file mode 100644 index 222b4ae79923b6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_5 ''' - explain shape plan - - - - -with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as 
decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', 
web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy deleted file mode 100644 index e97fd67f20d883..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_50 ''' - explain shape plan - - - - -select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - 
,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy deleted file mode 100644 index e80a42a2a9e8c4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_51 ''' - explain shape plan - - - - -WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else 
store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy deleted file mode 100644 index b827f1c2c0dc39..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_52 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy deleted file mode 100644 index fb8fe661c8b401..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_53 ''' - explain shape plan - - - - -select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu 
packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy deleted file mode 100644 index a919601deab68b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_54 ''' - explain shape plan - - - - -with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from 
my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy deleted file mode 100644 index a1760bd16d2a23..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_55 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy deleted file mode 100644 index 6e611882852e61..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_56 ''' - explain shape plan - - - - -with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - 
and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy deleted file mode 100644 index 5be0c3041c2082..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_57 ''' - explain shape plan - - - -with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy deleted file mode 100644 index a34ff64eee162c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_58 ''' - explain shape plan - - - - -with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - 
,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy deleted file mode 100644 index be96d9b5ffe71c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_59 ''' - explain shape plan - - - -with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales 
sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy deleted file mode 100644 index bbd815ee463cb0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - // TODO: uncomment following line to get better shape - // sql 'set max_join_number_bushy_tree=6' - - qt_ds_shape_6 ''' - explain shape plan - - - - -select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy deleted file mode 100644 index 6c60d4a4b7ca22..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_60 ''' - explain shape plan - - - - -with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, 
- date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy deleted file mode 100644 index 11dc3db78d1f35..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_61 ''' - explain shape plan - - - - -select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - 
where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy deleted file mode 100644 index 6eac332278c1b4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_62 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy deleted file mode 100644 index 600981b9cebedc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_63 ''' - explain shape plan - - - - -select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy deleted file mode 100644 index 9c06fec135a9a9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds64 = ''' - with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), - cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE 
ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 - group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year - ) - select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt - from cross_sales cs1,cross_sales cs2 - where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip - order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1; - - ''' - - qt_ds_shape_64 'explain shape plan ' + ds64 - -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy deleted file mode 100644 index bec1515c2ee85e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_65 ''' - explain shape plan - - - - -select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy deleted file mode 100644 index 83804dfb545594..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_66 ''' - explain shape plan - - - - -select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - 
,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy 
= 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - 
,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when 
d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy deleted file mode 100644 index e6a5a63192f5a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_67 ''' - explain shape plan - - - - -select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - 
,sumsales - ,rk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy deleted file mode 100644 index e9dd6bb74416c0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_68 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100; - - ''' -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy deleted file mode 100644 index b80eec4fc048d6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_69 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy deleted file mode 100644 index 547f1c0ac3afeb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_7 ''' - explain shape plan - - - - -select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy deleted file mode 100644 index ec131a31f78102..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_70 ''' - explain shape plan - - - - -select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) 
as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy deleted file mode 100644 index 34836552508d38..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_71 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy deleted file mode 100644 index 30d399176fa3a4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_72 ''' - explain shape plan - - - - -select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and d3.d_date > d1.d_date + 5 - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy deleted file mode 100644 index 906dce3697abad..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_73 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy deleted file mode 100644 index cd7c8028b6d47b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_74 ''' - explain shape plan - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = 
t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy deleted file mode 100644 index 1845cf2c2acd21..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_75 ''' - explain shape plan - - - - -WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - 
COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy deleted file mode 100644 index cf5fa8f8c232e5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_76 ''' - explain shape plan - - - - -select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - 
WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy deleted file mode 100644 index fb196585a8b692..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_77 ''' - explain shape plan - - - - -with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between 
cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy deleted file mode 100644 index 70587543e584fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_78 ''' - explain shape plan - - - - -with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs 
as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy deleted file mode 100644 index c5eeb99d6d7603..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_79 ''' - explain shape plan - - - - -select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy deleted file mode 100644 index b01eab837b63ac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_8 ''' - explain shape plan - - - - -select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - 
'31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - 
'58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy deleted file mode 100644 index c3973871b98c1d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_80 ''' - explain shape plan - - - - -with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between 
cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy deleted file mode 100644 index b86d7bb4dc575d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_81 ''' - explain shape plan - - - - -with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - 
,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy deleted file mode 100644 index c6ea139fcb164b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_82 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy deleted file mode 100644 index c363bd4037969c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_83 ''' - explain shape plan - - - - -with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where 
d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy deleted file mode 100644 index a030a4c1ae132f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_84 ''' - explain shape plan - - - - -select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy deleted file mode 
100644 index c8931084b4a0e1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_85 ''' - explain shape plan - - - - -select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, 
reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy deleted file mode 100644 index 669e3fae1d357d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_86 ''' - explain shape plan - - - - -select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - and 
d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy deleted file mode 100644 index baf43511edeed0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_87 ''' - explain shape plan - - - - -select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy deleted file mode 100644 index cc1dc227786134..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed 
to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_88 ''' - explain shape plan - - - - -select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - 
(household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and 
household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , 
time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy deleted file mode 100644 index 4ca9b8f69f923d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_89 ''' - explain shape plan - - - - -select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - i_class in ('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy deleted file mode 100644 index 72321f8a522999..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - qt_ds_shape_9 ''' - explain shape plan - - - - -select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select 
avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy deleted file mode 100644 index 25e457398299db..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_90 ''' - explain shape plan - - - - -select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and 
ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy deleted file mode 100644 index 655387b20a885c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_91 ''' - explain shape plan - - - - -select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 -and d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy deleted file mode 100644 index 1e6bc33766f29f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy +++ 
/dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_92 ''' - explain shape plan - - - - -select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM 
- web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy deleted file mode 100644 index 067576f5ac8318..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_93 ''' - explain shape plan - - - - -select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy deleted file mode 100644 index 5832e550cdcdc8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_94 ''' - explain shape plan - - - - -select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> 
ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy deleted file mode 100644 index 10a0394d19d56b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_95 ''' - explain shape plan - - -with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy deleted file mode 100644 index ee419513883b8b..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_96 ''' - explain shape plan - - - - -select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and 
household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy deleted file mode 100644 index 253ea22b361497..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_97 ''' - explain shape plan - - - - -with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy deleted file mode 100644 index f48270d50ad8d6..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_98 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where 
- ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy deleted file mode 100644 index a2bb765ae6bd3b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_99 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy deleted file mode 100644 index 6fe68f53233903..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - qt_ds_shape_1 ''' - explain shape plan -with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy deleted file mode 100644 index fc895a2048160c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_10 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where 
c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy deleted file mode 100644 index e63b9d8f685006..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_11 ''' - explain shape plan - - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = 
ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy deleted file mode 100644 index 47ac46798f9264..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_12 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - 
,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy deleted file mode 100644 index 9aa9d301878f38..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_13 ''' - explain shape plan - - -select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 
150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy deleted file mode 100644 index a491062d05363e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_14 ''' - explain shape plan - - -with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from 
catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy deleted file mode 100644 index c42ff735202802..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_15 ''' - explain shape plan - - - -select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy deleted file mode 100644 index 6516e0d007dc92..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_16 ''' - explain shape plan - - - - -select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from 
catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy deleted file mode 100644 index 367fee559d579b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_17 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name 
in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy deleted file mode 100644 index beb7e30d525844..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_18 ''' - explain shape plan - - - - -select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy deleted file mode 100644 index b845f9d6a08212..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_19 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy deleted file mode 100644 index 89c63655fc5c3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_2 ''' - explain shape plan - - - - -with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null 
end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy deleted file mode 100644 index 2262f12f7e2a77..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_20 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id 
- ,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy deleted file mode 100644 index 4d9d0f7b8f5639..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_21 ''' - explain shape plan - - - - -select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy deleted file mode 100644 index 0feaf26850b36f..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_22 ''' - explain shape plan - - - - -select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - 
,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy deleted file mode 100644 index 3d8aee1907c112..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_23 ''' - explain shape plan - - - -with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and 
cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy deleted file mode 100644 index 8ce1a2c29f8b7f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_24 ''' - explain shape plan - - - - with ssales as - (select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid - from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address - where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip - and s_market_id=8 - group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) - select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid - from ssales - where i_color = 'beige' - group by c_last_name - ,c_first_name - ,s_store_name - having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) - order by c_last_name - ,c_first_name - ,s_store_name - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy 
deleted file mode 100644 index ca4fea0419c96e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_25 ''' - explain shape plan - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - 
store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy deleted file mode 100644 index a5e0a760a24706..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_26 ''' - explain shape plan - - - - -select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy deleted file mode 100644 index e0888d00f3785a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_27 ''' - explain shape plan - - - -select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100; - - 
''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy deleted file mode 100644 index c2cd90ef2d1dc6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_28 ''' - explain shape plan - - - - -select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or 
ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy deleted file mode 100644 index 238f0e434d9772..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_29 ''' - explain shape plan - - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy deleted file mode 100644 index 
de3b2bbbece903..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_3 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = 
item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy deleted file mode 100644 index a39c25ea9bd016..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_30 ''' - explain shape plan - - - - -with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy deleted file mode 100644 index 12fb0f766c7385..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_31 ''' - explain shape plan - - - - -with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and 
ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy deleted file mode 100644 index 1132ffd6bea511..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_32 ''' - explain shape plan - - - - -select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy deleted file mode 100644 index 78587586a51685..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_33 ''' - explain shape plan - - - - -with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from 
- item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy deleted file mode 100644 index 379b29ccdf7c59..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_34 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where 
ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy deleted file mode 100644 index 755ea40079011e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_35 ''' - explain shape plan - - - - -select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by 
ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy deleted file mode 100644 index def05c702aba48..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_36 ''' - explain shape plan - - - - -select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy deleted file mode 100644 index 82ce00de92bd42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_37 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by 
i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy deleted file mode 100644 index d2175cb65cc79a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_38 ''' - explain shape plan - - - - -select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy deleted file mode 100644 index 9dfcaf127d58d0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy +++ /dev/null 
@@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_39 ''' - explain shape plan - - - - -with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and 
inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy deleted file mode 100644 index 0470f27267ebc9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_4 ''' - explain shape plan - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and 
cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when 
t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy deleted file mode 100644 index 55fdce027dc4a7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_40 ''' - explain shape plan - - - -select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy deleted file mode 100644 index 3604cfa49f66ac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy +++ /dev/null @@ -1,98 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_41 ''' - explain shape plan - - - - -select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 
'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy deleted file mode 100644 index 1b2aad016d79eb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_42 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy deleted file mode 100644 index f75ac2dae53e27..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_43 ''' - explain shape plan - - - - -select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy deleted file mode 100644 index bccd8304610749..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_44 ''' - explain shape plan - - - - -select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where 
ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy deleted file mode 100644 index 54bdeaa5cd2e5e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_45 ''' - explain shape plan - - - - -select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy deleted file mode 100644 index 05edaa7a4dcb79..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_46 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - 
household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy deleted file mode 100644 index 6bf7243f0e2cea..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_47 ''' - explain shape plan - - -with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - 
v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy deleted file mode 100644 index 0ccf5809bc6d42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_48 ''' - explain shape plan - - - - -select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy deleted file mode 100644 index 4a9ab3f4a2ba4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_49 ''' - explain shape plan - - - - -select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - 
,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - 
) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy deleted file mode 100644 index 7f2e92f5fe7d8f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_5 ''' - explain shape plan - - - - -with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as 
decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', 
web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy deleted file mode 100644 index 1cc8aa811d2dc0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_50 ''' - explain shape plan - - - - -select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - 
,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy deleted file mode 100644 index 3fb187cdc3ec0e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_51 ''' - explain shape plan - - - - -WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else 
store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy deleted file mode 100644 index 00f0d1cdce76b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_52 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy deleted file mode 100644 index 0bbb9f8a7aa7aa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_53 ''' - explain shape plan - - - - -select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu 
packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy deleted file mode 100644 index cf2a0806fb7a76..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_54 ''' - explain shape plan - - - - -with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from 
my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy deleted file mode 100644 index 440cc0a53a375b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_55 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy deleted file mode 100644 index bd8405821ee155..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_56 ''' - explain shape plan - - - - -with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - 
and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy deleted file mode 100644 index 4e7d37d147ebed..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_57 ''' - explain shape plan - - - -with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy deleted file mode 100644 index 29451458a6f266..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_58 ''' - explain shape plan - - - - -with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - 
,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy deleted file mode 100644 index c8b985a5290cdf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_59 ''' - explain shape plan - - - -with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales 
sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy deleted file mode 100644 index 8455347f43c105..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - // TODO: uncomment following line to get better shape - // sql 'set max_join_number_bushy_tree=6' - - qt_ds_shape_6 ''' - explain shape plan - - - - -select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy deleted file mode 100644 index 02e6dedaa3c90f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_60 ''' - explain shape plan - - - - -with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, 
- date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy deleted file mode 100644 index 035daafadec3da..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_61 ''' - explain shape plan - - - - -select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - 
where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy deleted file mode 100644 index 820da22663fc33..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_62 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy deleted file mode 100644 index 5873e1c2753d3d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_63 ''' - explain shape plan - - - - -select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy deleted file mode 100644 index b4566e90f9575c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds64 = ''' - with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), - cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE 
ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 - group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year - ) - select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt - from cross_sales cs1,cross_sales cs2 - where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip - order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1; - - ''' - - qt_ds_shape_64 'explain shape plan ' + ds64 - -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy deleted file mode 100644 index 0fa05938bcf3d7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_65 ''' - explain shape plan - - - - -select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy deleted file mode 100644 index 407bb39f6117ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to 
the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_66 ''' - explain shape plan - - - - -select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - 
,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy 
= 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - 
,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when 
d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy deleted file mode 100644 index d337474611f5cb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_67 ''' - explain shape plan - - - - -select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - 
,sumsales - ,rk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy deleted file mode 100644 index b234cbee914806..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_68 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100; - - ''' -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy deleted file mode 100644 index f1b09b13229f3b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_69 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy deleted file mode 100644 index 3e2f47b9a374d1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_7 ''' - explain shape plan - - - - -select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy deleted file mode 100644 index 5e6196cec5ec14..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_70 ''' - explain shape plan - - - - -select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) 
as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy deleted file mode 100644 index dedf696d632767..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_71 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy deleted file mode 100644 index 8029b93fe637cd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_72 ''' - explain shape plan - - - - -select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and d3.d_date > d1.d_date + 5 - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy deleted file mode 100644 index 4dba313ee28c5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_73 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy deleted file mode 100644 index eebd2286ded0a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_74 ''' - explain shape plan - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = 
t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy deleted file mode 100644 index f1556bbc51f91f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_75 ''' - explain shape plan - - - - -WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - 
COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy deleted file mode 100644 index 9410eb8c8341c2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_76 ''' - explain shape plan - - - - -select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim 
- WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy deleted file mode 100644 index 54b615fd67f852..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_77 ''' - explain shape plan - - - - -with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between 
cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy deleted file mode 100644 index 6ad5e1a8be9e44..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_78 ''' - explain shape plan - - - - -with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), 
-cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy deleted file mode 100644 index fb04d65650dd31..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_79 ''' - explain shape plan - - - - -select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy deleted file mode 100644 index 821ffb2cf236a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_8 ''' - explain shape plan - - - - -select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - 
'31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - 
'58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy deleted file mode 100644 index 8ab44b3653b5bf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_80 ''' - explain shape plan - - - - -with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between 
cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy deleted file mode 100644 index 5a2fa3ae7b547d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_81 ''' - explain shape plan - - - - -with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - 
,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy deleted file mode 100644 index 39d87006a1d80f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_82 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy deleted file mode 100644 index 119fe440d9a756..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_83 ''' - explain shape plan - - - - -with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where 
d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy deleted file mode 100644 index fb0804cececcaa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_84 ''' - explain shape plan - - - - -select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy deleted file 
mode 100644 index 517301a858d109..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_85 ''' - explain shape plan - - - - -select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, 
date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy deleted file mode 100644 index c2594db8e9e045..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed 
to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_86 ''' - explain shape plan - - - - -select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - 
and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy deleted file mode 100644 index 51b50c928aa507..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_87 ''' - explain shape plan - - - - -select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy deleted file mode 100644 index 3e23dc65efdc5a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_88 ''' - explain shape plan - - - - -select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and 
household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - 
(household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - 
from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy deleted file mode 100644 index 070a9145a60f00..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_89 ''' - explain shape plan - - - - -select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - i_class in ('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy deleted file mode 100644 index 8afa6c0b913a4c..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - qt_ds_shape_9 ''' - explain shape plan - - - - -select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select 
avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy deleted file mode 100644 index 359090a313e9cd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_90 ''' - explain shape plan - - - - -select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and 
ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy deleted file mode 100644 index a8d4b4895bf616..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_91 ''' - explain shape plan - - - - -select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 -and d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy deleted file mode 100644 index b314dfb25374bf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy +++ 
/dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_92 ''' - explain shape plan - - - - -select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM 
- web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy deleted file mode 100644 index d504874817236c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_93 ''' - explain shape plan - - - - -select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy deleted file mode 100644 index dd66d44f3efb67..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_94 ''' - explain shape plan - - - - -select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> 
ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy deleted file mode 100644 index 9472af9d687cdc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=12" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_95 ''' - explain shape plan - - -with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy deleted file mode 100644 index 9973173945983f..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_96 ''' - explain shape plan - - - - -select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and 
household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy deleted file mode 100644 index 7038a8179b8f87..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_97 ''' - explain shape plan - - - - -with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy deleted file mode 100644 index cb803a7032b4d8..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_98 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim 
-where - ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy deleted file mode 100644 index 0806ca2a7bdecd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_99 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy deleted file mode 100644 index 7b93b538c6a947..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy deleted file mode 100644 index 8ebc3d34f8c92b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) 
and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100""" - qt_ds_shape_10 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy deleted file mode 100644 index 1eba17bdfa959d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - 
t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_11 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy deleted file mode 100644 index 3de0eddfeacefb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_12 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy 
deleted file mode 100644 index 4bf64c23d264ea..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - 
and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy deleted file mode 100644 index 29d8d419dd1536..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = 
category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - 
having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100""" - qt_ds_shape_14 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy deleted file mode 100644 index d2d371da6c95c3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100""" - qt_ds_shape_15 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy deleted file mode 100644 index ba5e1073fccd59..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100""" - 
qt_ds_shape_16 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy deleted file mode 100644 index 3d6fe36e5b0f89..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = 
cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100""" - qt_ds_shape_17 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy deleted file mode 100644 index ee8f64d80ea204..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100""" - qt_ds_shape_18 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy deleted file mode 100644 index 8461069c183e93..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy deleted file mode 100644 index 61574ba16f58fa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - 
sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1""" - qt_ds_shape_2 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy deleted file mode 100644 index 321e1d913dc48f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_20 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy 
deleted file mode 100644 index feff18960306b0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast 
('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100""" - qt_ds_shape_21 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy deleted file mode 100644 index bfb87e1402b41d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100""" - qt_ds_shape_22 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy deleted file mode 100644 index c97e2fb303e1f2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where 
ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy deleted file mode 100644 index cfc582e615208f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=8 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'beige' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy deleted file mode 100644 index 1227c775eeb559..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = 
ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_25 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy deleted file mode 100644 index e80f67d935c302..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_26 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy deleted file mode 100644 index ac99f5918c6b3f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100""" - qt_ds_shape_27 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy deleted file mode 100644 index 6d72e2f2e6142e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - 
,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100""" - qt_ds_shape_28 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy deleted file mode 100644 index 76d0424a831ed2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_29 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy deleted file mode 100644 index fdaa33c8b8f68b..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - 
,brand_id - limit 100""" - qt_ds_shape_3 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy deleted file mode 100644 index a5a28b942da603..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100""" - qt_ds_shape_30 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy deleted file mode 100644 index a9e8e54f2e03cb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else 
null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase""" - qt_ds_shape_31 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy deleted file mode 100644 index ff511671303007..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - - def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100""" - qt_ds_shape_32 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy deleted file mode 100644 index 139221711c9092..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - 
and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100""" - qt_ds_shape_33 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy deleted file mode 100644 index 2dfb1a3fab0432..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number""" - qt_ds_shape_34 """ - explain shape plan - ${ds} - """ -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy deleted file mode 100644 index 7c579674b5bdc2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - 
limit 100""" - qt_ds_shape_35 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy deleted file mode 100644 index b18d481ad47615..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100""" - qt_ds_shape_36 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy deleted file mode 100644 index af1b5737791539..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_37 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy deleted file mode 100644 index 861df1065d10a2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100""" - qt_ds_shape_38 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy deleted file mode 100644 index dde7b5e771220b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy +++ /dev/null @@ 
-1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by 
w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov""" - qt_ds_shape_39 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy deleted file mode 100644 index 950cc47c083427..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - 
,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > 
case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_4 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy deleted file mode 100644 index ee2016698d4fc6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100""" - qt_ds_shape_40 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy deleted file mode 100644 index 9e042b7b73e7ad..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * 
or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 
'Men' and - (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100""" - qt_ds_shape_41 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy deleted file mode 100644 index a801f21595be87..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 """ - qt_ds_shape_42 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy deleted file mode 100644 index 65eb573d9f663f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then 
ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100""" - qt_ds_shape_43 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy deleted file mode 100644 index afa0b2691d9fe0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy 
deleted file mode 100644 index 0655abe5962517..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', 
'80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy deleted file mode 100644 index 56432e72023569..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100""" - qt_ds_shape_46 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy deleted file mode 100644 index 3cef5880701fc7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - 
avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_47 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy deleted file mode 100644 index 788a32a3d9af4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -""" - qt_ds_shape_48 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy deleted file mode 100644 index 08c2e111116002..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - 
,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 
- limit 100""" - qt_ds_shape_49 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy deleted file mode 100644 index d2bb54dfc468e3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - 
cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by 
rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_5 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy deleted file mode 100644 index b13bfe4e95837e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by 
s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100""" - qt_ds_shape_50 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy deleted file mode 100644 index 3db2f3a5abb815..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full 
outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100""" - qt_ds_shape_51 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy deleted file mode 100644 index 87393c0825fb46..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 """ - qt_ds_shape_52 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy deleted file mode 100644 index 8c249ff3044e8e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where 
case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100""" - qt_ds_shape_53 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy deleted file mode 100644 index 083e4d648d299b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - 
group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy deleted file mode 100644 index dab3d5e7cde7f4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 """ - qt_ds_shape_55 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy deleted file mode 100644 index 2143dcb2dd4795..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) 
total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy deleted file mode 100644 index e3e2a1ad57e5b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_57 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy deleted file mode 100644 index c160d0ada1ba91..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 
100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100""" - qt_ds_shape_58 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy deleted file mode 100644 index 79d68ee081ff36..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = 
s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100""" - qt_ds_shape_59 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy deleted file mode 100644 index 9a18350057d29c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy deleted file mode 100644 index a778c869418df6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and 
d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100""" - qt_ds_shape_60 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy deleted file mode 100644 index 3efb6f2051c734..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy deleted file mode 100644 index d9e8e6a5638e42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100""" - qt_ds_shape_62 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy deleted 
file mode 100644 index ef7bfcb661924c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in 
(1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100""" - qt_ds_shape_63 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy deleted file mode 100644 index d0c4ada0d677ae..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - 
ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy deleted file mode 100644 
index 13ff66a236f908..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 
1221+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100""" - qt_ds_shape_65 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy deleted file mode 100644 index 3ad2fbecfa9cb7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net 
- ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) 
as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then 
cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - 
,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100""" - qt_ds_shape_66 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy deleted file mode 100644 index 60725af0b19fce..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy deleted file mode 100644 index c2d21e2d218559..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and 
store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy deleted file mode 100644 index 4ce0907dd769b2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100""" - qt_ds_shape_69 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy deleted file mode 100644 index 015f3504c6f0a5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_7 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy deleted file mode 100644 index 34f11394e63dcf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and 
d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100""" - qt_ds_shape_70 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy deleted file mode 100644 index a6bd9c8884d29b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - """ - qt_ds_shape_71 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy deleted 
file mode 100644 index 6e7360ed7ddfd3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select /*+ SET_VAR(max_join_number_bushy_tree=10, memo_max_group_expression_size=15000)*/ i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join 
inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" - qt_ds_shape_72 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy deleted file mode 100644 index 5b02c9a994842e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker 
County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc""" - qt_ds_shape_73 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy deleted file mode 100644 index 06cd139cb4e834..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and 
t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100""" - qt_ds_shape_74 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy deleted file mode 100644 index 9397c42c127d46..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON 
d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100""" - qt_ds_shape_75 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy deleted file mode 100644 index 9b0c5ca496c1b1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY 
channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100""" - qt_ds_shape_76 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy deleted file mode 100644 index 72f7a552e13b3d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as 
- ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_77 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy deleted file mode 100644 index aa17385358d0d7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - 
from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy deleted file mode 100644 index 34f613e0974752..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in 
(1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100""" - qt_ds_shape_79 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy deleted file mode 100644 index 5fbbac6e3d11fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - 
'33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - 
'16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy deleted file mode 100644 index 6fadaafdb42a4a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = 
cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_80 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy deleted file mode 100644 index c7233532657d5a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select 
avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100""" - qt_ds_shape_81 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy deleted file mode 100644 index 94ddfc873b1053..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_82 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy deleted file mode 100644 index d4696ed1a2350e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - 
item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100""" - qt_ds_shape_83 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy deleted file mode 100644 index 1cea65929d1ea0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100""" - qt_ds_shape_84 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy deleted file mode 100644 index 5455c49f9d813c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - 
cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100""" - qt_ds_shape_85 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy deleted file mode 100644 index 9ba88449ca98f7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100""" - qt_ds_shape_86 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy deleted file mode 100644 index 
391627f33adaac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from 
catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -""" - qt_ds_shape_87 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy deleted file mode 100644 index f7a330a6963eea..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, 
household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = 
s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -""" - qt_ds_shape_88 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy deleted file mode 100644 index b0f23ecd8a66b1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy +++ /dev/null 
@@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - 
i_class in ('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100""" - qt_ds_shape_89 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy deleted file mode 100644 index 76f5a7ed84c454..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - def ds = """select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where 
ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -""" - qt_ds_shape_9 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy deleted file mode 100644 index 5023a13e1f25dc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100""" - qt_ds_shape_90 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy deleted file mode 100644 index 199e374915a32b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 
-and d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy deleted file mode 100644 index a8d060e557c4cd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100""" - qt_ds_shape_92 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy deleted file mode 100644 index fcbb8872980f5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100""" - qt_ds_shape_93 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy deleted file mode 100644 index 81e63de5b22b05..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_94 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy deleted file mode 100644 index be8b36d1c9c253..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = 
ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy deleted file mode 100644 index c3d1721650f207..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100""" - qt_ds_shape_96 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy deleted file mode 100644 index 5ea649b1046797..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci 
full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100""" - qt_ds_shape_97 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy deleted file mode 100644 index ac78b3adcf5da7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio""" - qt_ds_shape_98 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy deleted file mode 100644 index 1e867ca4987f0f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - 
,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100""" - qt_ds_shape_99 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy deleted file mode 100644 index cdaa360eafaacd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy deleted file mode 100644 index 7e492e1ec8cec8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) 
and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100""" - qt_ds_shape_10 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy deleted file mode 100644 index ab874a3853f8da..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - 
t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_11 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy deleted file mode 100644 index 6fc3a9149599d4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_12 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy deleted file 
mode 100644 index 152c4132629c63..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - 
and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy deleted file mode 100644 index c621b72b18c4e4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = 
category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - 
having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100""" - qt_ds_shape_14 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy deleted file mode 100644 index 48dc7923ebaa3f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100""" - qt_ds_shape_15 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy deleted file mode 100644 index 16103efe13173f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100""" - 
qt_ds_shape_16 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy deleted file mode 100644 index 5cafc500be9674..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk 
- and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100""" - qt_ds_shape_17 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy deleted file mode 100644 index 42bc9b85e8a581..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100""" - qt_ds_shape_18 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy deleted file mode 100644 index 102142a9975b04..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy deleted file mode 100644 index c8afcaca91ae45..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case 
when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1""" - qt_ds_shape_2 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy deleted file mode 100644 index e64fd0a4d64957..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_20 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy deleted file 
mode 100644 index 8c0d46200b8da3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('2002-02-27' as date)) - 
then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100""" - qt_ds_shape_21 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy deleted file mode 100644 index ab0afa82a7f8b1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100""" - qt_ds_shape_22 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy deleted file mode 100644 index b09ccf2079a525..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk 
= c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy deleted file mode 100644 index 7f8d9159cd3ab2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=8 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'beige' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy deleted file mode 100644 index 7d6b7930cd82f2..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = 
ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_25 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy deleted file mode 100644 index a269a64d6600b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_26 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy deleted file mode 100644 index e02f0e1baf82f2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100""" - qt_ds_shape_27 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy deleted file mode 100644 index cbd3938230d209..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - 
,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100""" - qt_ds_shape_28 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy deleted file mode 100644 index a8c46eb13c7b70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_29 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy deleted file mode 100644 index 2030616e958181..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - 
,brand_id - limit 100""" - qt_ds_shape_3 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy deleted file mode 100644 index 301ac521b84480..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100""" - qt_ds_shape_30 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy deleted file mode 100644 index 607dc93051d195..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else 
null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase""" - qt_ds_shape_31 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy deleted file mode 100644 index e90fa68b73443a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100""" - qt_ds_shape_32 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy deleted file mode 100644 index b09523f54b7d46..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - 
and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100""" - qt_ds_shape_33 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy deleted file mode 100644 index 6e61686781f9ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number""" - qt_ds_shape_34 """ - explain shape plan - ${ds} - """ -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy deleted file mode 100644 index 0359da335facba..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - 
limit 100""" - qt_ds_shape_35 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy deleted file mode 100644 index 80a114afc269a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100""" - qt_ds_shape_36 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy deleted file mode 100644 index e5f353ab44a134..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_37 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy deleted file mode 100644 index 7a7c71813664ed..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100""" - qt_ds_shape_38 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy deleted file mode 100644 index 55c7135fca9e8d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy +++ /dev/null @@ -1,67 +0,0 
@@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by 
w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov""" - qt_ds_shape_39 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy deleted file mode 100644 index 969981b25f7cac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - 
,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > 
case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_4 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy deleted file mode 100644 index 8d261843553c84..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100""" - qt_ds_shape_40 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy deleted file mode 100644 index 68ea4200724d18..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and 
- (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100""" - qt_ds_shape_41 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy deleted file mode 100644 index df1baef595771e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 """ - qt_ds_shape_42 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy deleted file mode 100644 index fdc21823808f4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price 
else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100""" - qt_ds_shape_43 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy deleted file mode 100644 index 085ef7323dc884..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy deleted 
file mode 100644 index a8d4bc2bff2d7f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', 
'81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy deleted file mode 100644 index 13edc5bad59a57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100""" - qt_ds_shape_46 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy deleted file mode 100644 index 63b6ee07cbee57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - 
avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_47 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy deleted file mode 100644 index c8594b7fd10cf7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -""" - qt_ds_shape_48 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy deleted file mode 100644 index 08a1e2ca819b96..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - 
,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 
- limit 100""" - qt_ds_shape_49 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy deleted file mode 100644 index 43524c4c6e8a28..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - 
cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by 
rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_5 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy deleted file mode 100644 index fa5bd0908d6201..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by 
s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100""" - qt_ds_shape_50 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy deleted file mode 100644 index 7a00b2b7a52de4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full 
outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100""" - qt_ds_shape_51 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy deleted file mode 100644 index aca150ca790e5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 """ - qt_ds_shape_52 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy deleted file mode 100644 index 62240282e24579..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where 
case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100""" - qt_ds_shape_53 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy deleted file mode 100644 index 0d7fb531304d95..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - 
group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy deleted file mode 100644 index 37af78fb24bf3c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 """ - qt_ds_shape_55 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy deleted file mode 100644 index b96a95094fe147..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) 
total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy deleted file mode 100644 index a258a524731224..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_57 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy deleted file mode 100644 index 0ebb6288536ae8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 
100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100""" - qt_ds_shape_58 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy deleted file mode 100644 index 15005ea7f1638a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = 
s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100""" - qt_ds_shape_59 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy deleted file mode 100644 index 54b0e9c129fd72..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy deleted file mode 100644 index d980e0cbc3a1b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - 
and d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100""" - qt_ds_shape_60 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy deleted file mode 100644 index 0ea2596fdf01be..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy deleted file mode 100644 index 95fe33f4c608c0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100""" - qt_ds_shape_62 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy deleted file 
mode 100644 index 367ee302c10a70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in 
(1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100""" - qt_ds_shape_63 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy deleted file mode 100644 index 5644db2d92c867..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - 
ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy deleted file mode 100644 index 
9f5108267560a3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - 
group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100""" - qt_ds_shape_65 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy deleted file mode 100644 index e9b0f8c7e89f05..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as 
mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 
0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then 
cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - 
,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100""" - qt_ds_shape_66 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy deleted file mode 100644 index bba4022c805c07..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy deleted file mode 100644 index 08bcc07785d10d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and 
store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy deleted file mode 100644 index 1122aa1716cd53..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100""" - qt_ds_shape_69 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy deleted file mode 100644 index 0031a3088da690..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_7 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy deleted file mode 100644 index d79b36bec668a6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and 
d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100""" - qt_ds_shape_70 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy deleted file mode 100644 index 480c77aaffce75..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - """ - qt_ds_shape_71 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy deleted file 
mode 100644 index 73b574b9fdd0dc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select /*+ SET_VAR(max_join_number_bushy_tree=10, memo_max_group_expression_size=15000)*/ i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on 
(cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" - qt_ds_shape_72 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy deleted file mode 100644 index 19d399ee7ea84b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker 
County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc""" - qt_ds_shape_73 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy deleted file mode 100644 index 65c65d66bcf294..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and 
t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100""" - qt_ds_shape_74 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy deleted file mode 100644 index 0afdc93abf33f8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim 
ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100""" - qt_ds_shape_75 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy deleted file mode 100644 index 52b75d1628bfd7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY 
channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100""" - qt_ds_shape_76 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy deleted file mode 100644 index 22483589531b2d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws 
as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_77 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy deleted file mode 100644 index eee40a4fc03dfc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp 
- from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy deleted file mode 100644 index 45bd5452c078c6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in 
(1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100""" - qt_ds_shape_79 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy deleted file mode 100644 index e44e47a69e9fdd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - 
'33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - 
'16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy deleted file mode 100644 index a8924221229cbf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = 
cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_80 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy deleted file mode 100644 index 85088a4553fa7c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select 
avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100""" - qt_ds_shape_81 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy deleted file mode 100644 index 1663dcdb3ee7b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_82 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy deleted file mode 100644 index a8fd8561e20a09..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - 
item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100""" - qt_ds_shape_83 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy deleted file mode 100644 index 01433c96e96718..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100""" - qt_ds_shape_84 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy deleted file mode 100644 index 858a94cd81d36b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - 
cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100""" - qt_ds_shape_85 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy deleted file mode 100644 index 71e726382da63e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100""" - qt_ds_shape_86 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy deleted file mode 100644 index 
52e6ab0b62425c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from 
catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -""" - qt_ds_shape_87 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy deleted file mode 100644 index dcdeb6a1ac4386..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, 
household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = 
s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -""" - qt_ds_shape_88 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy deleted file mode 100644 index 9ddbe3cf68ce57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy +++ /dev/null @@ -1,68 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - i_class in 
('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100""" - qt_ds_shape_89 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy deleted file mode 100644 index df25c356c66e21..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - def ds = """select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where 
ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -""" - qt_ds_shape_9 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy deleted file mode 100644 index 0f5cde4e25e30a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100""" - qt_ds_shape_90 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy deleted file mode 100644 index 3c706057a02f5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 -and 
d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy deleted file mode 100644 index 1013519e91975e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100""" - qt_ds_shape_92 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy deleted file mode 100644 index 53d1d63184839e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100""" - qt_ds_shape_93 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy deleted file mode 100644 index 4888bb535c6dc0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_94 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy deleted file mode 100644 index 4f1712c93fd90b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=12' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk 
= ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy deleted file mode 100644 index 724e1a1f171702..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100""" - qt_ds_shape_96 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy deleted file mode 100644 index 0a4c7627524edb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci 
full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100""" - qt_ds_shape_97 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy deleted file mode 100644 index 9a401b0184eb3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio""" - qt_ds_shape_98 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy deleted file mode 100644 index 6c16d02330fb8e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - 
,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100""" - qt_ds_shape_99 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy deleted file mode 100644 index fc537b63380ef0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -suite('tpcds_sf100_stats') { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - def stats - stats = sql """ show column stats call_center ;""" - logger.info("${stats}") - stats = sql """ show column stats catalog_page ;""" - logger.info("${stats}") - stats = sql """ show column stats catalog_returns ;""" - logger.info("${stats}") - stats = sql """ show column stats catalog_sales ;""" - logger.info("${stats}") - stats = sql """ show column stats customer ;""" - logger.info("${stats}") - stats = sql """ show column stats customer_address ;""" - logger.info("${stats}") - stats = sql """ show column stats customer_demographics ;""" - logger.info("${stats}") - stats = sql """ show column stats date_dim ;""" - logger.info("${stats}") - stats = sql """ show column stats dbgen_version ;""" - logger.info("${stats}") - stats = sql """ show column stats household_demographics ;""" - logger.info("${stats}") - stats = sql """ show column stats income_band ;""" - logger.info("${stats}") - stats = sql """ show column stats inventory ;""" - logger.info("${stats}") - stats = sql """ show column stats item ;""" - logger.info("${stats}") - stats = sql """ show column stats promotion ;""" - logger.info("${stats}") - stats = sql """ show column stats reason ;""" - logger.info("${stats}") - stats = sql """ show column stats ship_mode ;""" - logger.info("${stats}") - stats = sql """ show column stats store ;""" - logger.info("${stats}") - stats = sql """ show column stats store_returns ;""" - logger.info("${stats}") - stats = sql """ show column stats store_sales ;""" - logger.info("${stats}") - stats = sql """ show column stats time_dim ;""" - logger.info("${stats}") - stats = sql """ show column stats warehouse ;""" - logger.info("${stats}") - stats = sql """ show column stats web_page ;""" - logger.info("${stats}") - stats = sql """ show column stats web_returns ;""" - logger.info("${stats}") - 
stats = sql """ show column stats web_sales ;""" - logger.info("${stats}") - stats = sql """ show column stats web_site ;""" - logger.info("${stats}") - -} \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy deleted file mode 100644 index f7acdd26eb8690..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 
'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 ''' - explain shape plan - select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy deleted file mode 100644 index 17466ecacf346e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - 
,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy deleted file mode 100644 index d42411e0c011d2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 ''' - explain shape plan - select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by 
rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy deleted file mode 100644 index 9a0eb67545c428..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 ''' - explain shape plan - select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk 
- and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy deleted file mode 100644 index 198b39b8583407..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 
as segment_base - from segments - group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 ''' - explain shape plan - with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy deleted file mode 100644 index c1dd7cbd87be1b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in 
(select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 ''' - explain shape plan - with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = 
i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy deleted file mode 100644 index 80639ff76d3720..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 ''' - explain shape plan - select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy deleted file mode 100644 index d34a2c3fb719c5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 ''' - explain shape plan - select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select 
sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy deleted file mode 100644 index 2477c21b67ce2d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in 
('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 ''' - explain shape plan - select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy deleted file mode 100644 index 33e361246ba77c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * 
or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - 
'73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - 
'36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 ''' - explain shape plan - select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - 
'21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - 
'72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy deleted file mode 100644 index 
0502b8403fb321..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = 
cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 ''' - explain shape plan - select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy deleted file mode 100644 index b4e6f86e2c50bd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = 
d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 ''' - explain shape plan - with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py b/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py deleted file mode 100644 index 8317bd1859f261..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. 
The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. -if __name__ == '__main__': - with open('shape.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf1000/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../shape/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl b/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl deleted file mode 100644 index c25fd3f36b03f4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - def ds = """{query}""" - qt_ds_shape_{--} ''' - explain shape plan - {query} - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy deleted file mode 100644 index aada0585602b1d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - multi_sql """ - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set parallel_fragment_exec_instance_num=8; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=true; - set enable_nereids_timeout = false; - set enable_runtime_filter_prune=false; - set runtime_filter_type=8; - set dump_nereids_memo=false; - set disable_nereids_rules=PRUNE_EMPTY_PARTITION; - """ - - qt_ds_shape_10 ''' - explain shape plan - select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Fairfield County','Campbell County','Washtenaw County','Escambia County','Cleburne County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year 
= 2001 and - d_moy between 3 ANd 3+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy deleted file mode 100644 index 14c11d3ea846f6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy +++ /dev/null @@ -1,2548 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql 
''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), - cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT 
EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - 
cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF 
NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - 
wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), - p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - 
drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - 
s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - 
sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, 
- c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - alter table customer add constraint customer_pk primary key (c_customer_sk); - ''' - - sql ''' - alter table customer add constraint customer_uk unique (c_customer_id); - ''' - - sql ''' - alter table store_sales add constraint ss_fk foreign key(ss_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table web_sales add constraint ws_fk foreign key(ws_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table catalog_sales add constraint cs_fk foreign key(cs_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql """ - alter table customer_demographics modify column cd_dep_employed_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_day_name set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='521779') - """ - - sql """ - alter table date_dim modify column d_following_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify 
column d_same_day_ly set stats ('row_count'='73049', 'ndv'='72450', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2487705', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_city set stats ('row_count'='20', 'ndv'='12', 'num_nulls'='0', 'min_value'='Fairview', 'max_value'='Shiloh', 'data_size'='183') - """ - - sql """ - alter table warehouse modify column w_street_type set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='71') - """ - - sql """ - alter table catalog_sales modify column cs_call_center_sk set stats ('row_count'='1439980416', 'ndv'='42', 'num_nulls'='7199711', 'min_value'='1', 'max_value'='42', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship set stats ('row_count'='1439980416', 'ndv'='2505826', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='43956.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_sales_price set stats ('row_count'='1439980416', 'ndv'='29306', 'num_nulls'='7200276', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_class set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='large', 'max_value'='small', 'data_size'='226') - """ - - sql """ - alter table call_center modify column cc_country set stats ('row_count'='42', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='546') - """ - - sql """ - alter table call_center modify column cc_county set stats ('row_count'='42', 'ndv'='16', 'num_nulls'='0', 'min_value'='Barrow County', 'max_value'='Williamson County', 'data_size'='627') - """ - - sql """ - alter table call_center modify column cc_mkt_class set stats ('row_count'='42', 'ndv'='36', 'num_nulls'='0', 'min_value'='A bit narrow forms matter animals. 
Consist', 'max_value'='Yesterday new men can make moreov', 'data_size'='1465') - """ - - sql """ - alter table call_center modify column cc_sq_ft set stats ('row_count'='42', 'ndv'='31', 'num_nulls'='0', 'min_value'='-1890660328', 'max_value'='2122480316', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_state set stats ('row_count'='42', 'ndv'='14', 'num_nulls'='0', 'min_value'='FL', 'max_value'='WV', 'data_size'='84') - """ - - sql """ - alter table inventory modify column inv_warehouse_sk set stats ('row_count'='783000000', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2881609', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cash set stats ('row_count'='143996756', 'ndv'='1107525', 'num_nulls'='2879192', 'min_value'='0.00', 'max_value'='26955.24', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2881314', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amt_inc_tax set stats ('row_count'='143996756', 'ndv'='1544502', 'num_nulls'='2881886', 'min_value'='0.00', 'max_value'='30418.06', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2883215', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_buy_potential set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='0-500', 'max_value'='Unknown', 'data_size'='54000') - """ - 
- sql """ - alter table customer_address modify column ca_address_id set stats ('row_count'='6000000', 'ndv'='5984931', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPEAA', 'data_size'='96000000') - """ - - sql """ - alter table customer_address modify column ca_address_sk set stats ('row_count'='6000000', 'ndv'='6015811', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000', 'data_size'='48000000') - """ - - sql """ - alter table customer_address modify column ca_country set stats ('row_count'='6000000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='75661794') - """ - - sql """ - alter table customer_address modify column ca_location_type set stats ('row_count'='6000000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='single family', 'data_size'='52372545') - """ - - sql """ - alter table customer_address modify column ca_street_number set stats ('row_count'='6000000', 'ndv'='1002', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='16837336') - """ - - sql """ - alter table customer_address modify column ca_suite_number set stats ('row_count'='6000000', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='45911575') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_id set stats ('row_count'='30000', 'ndv'='29953', 'num_nulls'='0', 'min_value'='AAAAAAAAAAABAAAA', 'max_value'='AAAAAAAAPPPGAAAA', 'data_size'='480000') - """ - - sql """ - alter table item modify column i_rec_end_date set stats ('row_count'='300000', 'ndv'='3', 'num_nulls'='150000', 'min_value'='1999-10-27', 'max_value'='2001-10-26', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239971', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_reversed_charge set stats 
('row_count'='71997522', 'ndv'='692680', 'num_nulls'='3239546', 'min_value'='0.00', 'max_value'='23194.77', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_state set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='0', 'min_value'='AL', 'max_value'='WV', 'data_size'='108') - """ - - sql """ - alter table promotion modify column p_end_date_sk set stats ('row_count'='1500', 'ndv'='683', 'num_nulls'='18', 'min_value'='2450113', 'max_value'='2450967', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_bill_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='180139', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_ship_cost set stats ('row_count'='720000376', 'ndv'='567477', 'num_nulls'='180084', 'min_value'='0.00', 'max_value'='14950.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_addr_sk set stats ('row_count'='720000376', 'ndv'='6015811', 'num_nulls'='179848', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_mode_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180017', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_warehouse_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180105', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_company_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_gmt_offset set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='6', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_manager set stats 
('row_count'='1002', 'ndv'='739', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Clifton', 'data_size'='12649') - """ - - sql """ - alter table store modify column s_street_number set stats ('row_count'='1002', 'ndv'='521', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='2874') - """ - - sql """ - alter table time_dim modify column t_meal_time set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='lunch', 'data_size'='248400') - """ - - sql """ - alter table time_dim modify column t_time set stats ('row_count'='86400', 'ndv'='86684', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_creation_date_sk set stats ('row_count'='3000', 'ndv'='199', 'num_nulls'='33', 'min_value'='2450604', 'max_value'='2450815', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_customer_sk set stats ('row_count'='3000', 'ndv'='713', 'num_nulls'='2147', 'min_value'='9522', 'max_value'='11995685', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_max_ad_count set stats ('row_count'='3000', 'ndv'='5', 'num_nulls'='31', 'min_value'='0', 'max_value'='4', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_url set stats ('row_count'='3000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='http://www.foo.com', 'data_size'='53406') - """ - - sql """ - alter table store_returns modify column sr_refunded_cash set stats ('row_count'='287999764', 'ndv'='928470', 'num_nulls'='10081294', 'min_value'='0.00', 'max_value'='18173.96', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_tax set stats ('row_count'='287999764', 'ndv'='117247', 'num_nulls'='10081332', 'min_value'='0.00', 'max_value'='1682.04', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify column ss_customer_sk set stats 
('row_count'='2879987999', 'ndv'='12157481', 'num_nulls'='129590766', 'min_value'='1', 'max_value'='12000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_hdemo_sk set stats ('row_count'='2879987999', 'ndv'='7251', 'num_nulls'='129594559', 'min_value'='1', 'max_value'='7200', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_store_sk set stats ('row_count'='2879987999', 'ndv'='499', 'num_nulls'='129572050', 'min_value'='1', 'max_value'='1000', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table customer modify column c_first_name set stats ('row_count'='12000000', 'ndv'='5140', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma', 'data_size'='67593278') - """ - - sql """ - alter table customer modify column c_first_sales_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='419856', 'min_value'='2448998', 'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_first_shipto_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='420769', 'min_value'='2449028', 'max_value'='2452678', 'data_size'='96000000') - """ - - sql """ - alter table customer_demographics modify column cd_dep_college_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_dow set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='292196') - """ - - sql """ - alter 
table date_dim modify column d_fy_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_qoy set stats ('row_count'='73049', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_street_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Elm', 'data_size'='176') - """ - - sql """ - alter table warehouse modify column w_suite_number set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite X', 'data_size'='150') - """ - - sql """ - alter table catalog_sales modify column cs_bill_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7202134', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7198837', 'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ext_ship_cost set stats ('row_count'='1439980416', 'ndv'='573238', 'num_nulls'='7202537', 'min_value'='0.00', 'max_value'='14994.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='California', 'max_value'='Pacific Northwest_2', 'data_size'='572') - """ - - sql """ - alter table call_center modify column cc_street_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='1st', 'max_value'='Willow', 'data_size'='356') - """ - - sql """ - alter table 
call_center modify column cc_zip set stats ('row_count'='42', 'ndv'='19', 'num_nulls'='0', 'min_value'='18605', 'max_value'='98048', 'data_size'='210') - """ - - sql """ - alter table inventory modify column inv_quantity_on_hand set stats ('row_count'='783000000', 'ndv'='1006', 'num_nulls'='39153758', 'min_value'='0', 'max_value'='1000', 'data_size'='3132000000') - """ - - sql """ - alter table catalog_returns modify column cr_catalog_page_sk set stats ('row_count'='143996756', 'ndv'='17005', 'num_nulls'='2882502', 'min_value'='1', 'max_value'='25207', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_income_band_sk set stats ('row_count'='7200', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='57600') - """ - - sql """ - alter table catalog_page modify column cp_description set stats ('row_count'='30000', 'ndv'='30141', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters worry both workers. Fascinating characters take cheap never alive studies. 
Direct, old', 'data_size'='2215634') - """ - - sql """ - alter table item modify column i_item_id set stats ('row_count'='300000', 'ndv'='150851', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPBAAA', 'data_size'='4800000') - """ - - sql """ - alter table web_returns modify column wr_account_credit set stats ('row_count'='71997522', 'ndv'='683955', 'num_nulls'='3241972', 'min_value'='0.00', 'max_value'='23166.33', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_net_loss set stats ('row_count'='71997522', 'ndv'='815608', 'num_nulls'='3240573', 'min_value'='0.50', 'max_value'='15887.84', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt set stats ('row_count'='71997522', 'ndv'='808311', 'num_nulls'='3238405', 'min_value'='0.00', 'max_value'='29191.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt_inc_tax set stats ('row_count'='71997522', 'ndv'='1359913', 'num_nulls'='3239765', 'min_value'='0.00', 'max_value'='30393.01', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_quantity set stats ('row_count'='71997522', 'ndv'='100', 'num_nulls'='3238643', 'min_value'='1', 'max_value'='100', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239658', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_customer_sk set stats ('row_count'='71997522', 'ndv'='12119220', 'num_nulls'='3237281', 'min_value'='1', 'max_value'='12000000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_mkt_desc set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Acres see else children. Mutual too', 'max_value'='Windows increase to a differences. 
Other parties might in', 'data_size'='3473') - """ - - sql """ - alter table web_site modify column web_mkt_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='1', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_rec_end_date set stats ('row_count'='54', 'ndv'='3', 'num_nulls'='27', 'min_value'='1999-08-16', 'max_value'='2001-08-15', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_site_id set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='864') - """ - - sql """ - alter table web_site modify column web_street_type set stats ('row_count'='54', 'ndv'='20', 'num_nulls'='0', 'min_value'='Ave', 'max_value'='Wy', 'data_size'='208') - """ - - sql """ - alter table promotion modify column p_channel_demo set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_channel_details set stats ('row_count'='1500', 'ndv'='1490', 'num_nulls'='0', 'min_value'='', 'max_value'='Young, valuable companies watch walls. 
Payments can flour', 'data_size'='59126') - """ - - sql """ - alter table promotion modify column p_channel_event set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_discount_active set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1473') - """ - - sql """ - alter table promotion modify column p_promo_sk set stats ('row_count'='1500', 'ndv'='1489', 'num_nulls'='0', 'min_value'='1', 'max_value'='1500', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_purpose set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='10374') - """ - - sql """ - alter table web_sales modify column ws_bill_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='179788', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_date_sk set stats ('row_count'='720000376', 'ndv'='1820', 'num_nulls'='179921', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_site_sk set stats ('row_count'='720000376', 'ndv'='54', 'num_nulls'='179930', 'min_value'='1', 'max_value'='54', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_city set stats ('row_count'='1002', 'ndv'='55', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='9238') - """ - - sql """ - alter table store modify column s_company_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='7', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_county set stats ('row_count'='1002', 'ndv'='28', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='14291') - """ - - sql """ - alter table store 
modify column s_geography_class set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6972') - """ - - sql """ - alter table store modify column s_hours set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='8AM-8AM', 'data_size'='7088') - """ - - sql """ - alter table store modify column s_store_id set stats ('row_count'='1002', 'ndv'='501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPBAAAAA', 'data_size'='16032') - """ - - sql """ - alter table store modify column s_zip set stats ('row_count'='1002', 'ndv'='354', 'num_nulls'='0', 'min_value'='', 'max_value'='99454', 'data_size'='4975') - """ - - sql """ - alter table time_dim modify column t_am_pm set stats ('row_count'='86400', 'ndv'='2', 'num_nulls'='0', 'min_value'='AM', 'max_value'='PM', 'data_size'='172800') - """ - - sql """ - alter table time_dim modify column t_minute set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_web_page_id set stats ('row_count'='3000', 'ndv'='1501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPKAAAAA', 'data_size'='48000') - """ - - sql """ - alter table web_page modify column wp_web_page_sk set stats ('row_count'='3000', 'ndv'='2984', 'num_nulls'='0', 'min_value'='1', 'max_value'='3000', 'data_size'='24000') - """ - - sql """ - alter table store_returns modify column sr_return_amt set stats ('row_count'='287999764', 'ndv'='671228', 'num_nulls'='10080055', 'min_value'='0.00', 'max_value'='19434.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_returned_date_sk set stats ('row_count'='287999764', 'ndv'='2010', 'num_nulls'='10079607', 'min_value'='2450820', 'max_value'='2452822', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_tax 
set stats ('row_count'='2879987999', 'ndv'='149597', 'num_nulls'='129588732', 'min_value'='0.00', 'max_value'='1797.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_current_cdemo_sk set stats ('row_count'='12000000', 'ndv'='1913901', 'num_nulls'='419895', 'min_value'='1', 'max_value'='1920800', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_customer_id set stats ('row_count'='12000000', 'ndv'='11921032', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPKAA', 'data_size'='192000000') - """ - - sql """ - alter table date_dim modify column d_current_day set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_current_month set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date set stats ('row_count'='73049', 'ndv'='73250', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='2100-01-01', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_moy set stats ('row_count'='73049', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_gmt_offset set stats ('row_count'='20', 'ndv'='3', 'num_nulls'='1', 'min_value'='-7.00', 'max_value'='-5.00', 'data_size'='80') - """ - - sql """ - alter table warehouse modify column w_warehouse_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table warehouse modify column w_warehouse_sq_ft set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='1', 'min_value'='73065', 'max_value'='977787', 'data_size'='80') - """ - - sql """ - alter table catalog_sales modify column cs_ext_sales_price set stats 
('row_count'='1439980416', 'ndv'='1100662', 'num_nulls'='7199625', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='393180', 'num_nulls'='7199876', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_item_sk set stats ('row_count'='1439980416', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_tax set stats ('row_count'='1439980416', 'ndv'='2422238', 'num_nulls'='7200702', 'min_value'='0.00', 'max_value'='32376.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_date_sk set stats ('row_count'='1439980416', 'ndv'='1933', 'num_nulls'='7200707', 'min_value'='2450817', 'max_value'='2452744', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_warehouse_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7200688', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_division set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_division_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='164') - """ - - sql """ - alter table call_center modify column cc_manager set stats ('row_count'='42', 'ndv'='28', 'num_nulls'='0', 'min_value'='Alden Snyder', 'max_value'='Wayne Ray', 'data_size'='519') - """ - - sql """ - alter table call_center modify column cc_rec_start_date set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='2002-01-01', 
'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_call_center_sk set stats ('row_count'='143996756', 'ndv'='42', 'num_nulls'='2881668', 'min_value'='1', 'max_value'='42', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_net_loss set stats ('row_count'='143996756', 'ndv'='911034', 'num_nulls'='2881704', 'min_value'='0.50', 'max_value'='16095.08', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_customer_sk set stats ('row_count'='143996756', 'ndv'='12156363', 'num_nulls'='2879017', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882107', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_customer_sk set stats ('row_count'='143996756', 'ndv'='12157481', 'num_nulls'='2879023', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table customer_address modify column ca_gmt_offset set stats ('row_count'='6000000', 'ndv'='6', 'num_nulls'='180219', 'min_value'='-10.00', 'max_value'='-5.00', 'data_size'='24000000') - """ - - sql """ - alter table item modify column i_color set stats ('row_count'='300000', 'ndv'='93', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow', 'data_size'='1610293') - """ - - sql """ - alter table item modify column i_manufact set stats ('row_count'='300000', 'ndv'='1004', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripri', 'data_size'='3379693') - """ - - sql """ - alter table item modify column i_product_name set stats ('row_count'='300000', 'ndv'='294994', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripripripriought', 'data_size'='6849199') - """ - - sql """ - alter table web_returns modify column wr_returned_time_sk 
set stats ('row_count'='71997522', 'ndv'='87677', 'num_nulls'='3238574', 'min_value'='0', 'max_value'='86399', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_manager set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='William Young', 'data_size'='658') - """ - - sql """ - alter table web_site modify column web_mkt_class set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='Written, political plans show to the models. T', 'data_size'='1822') - """ - - sql """ - alter table web_site modify column web_rec_start_date set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='2', 'min_value'='1997-08-16', 'max_value'='2001-08-16', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_street_number set stats ('row_count'='54', 'ndv'='36', 'num_nulls'='0', 'min_value'='', 'max_value'='983', 'data_size'='154') - """ - - sql """ - alter table promotion modify column p_channel_catalog set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_promo_id set stats ('row_count'='1500', 'ndv'='1519', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPEAAAAA', 'data_size'='24000') - """ - - sql """ - alter table web_sales modify column ws_bill_customer_sk set stats ('row_count'='720000376', 'ndv'='12103729', 'num_nulls'='179817', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_list_price set stats ('row_count'='720000376', 'ndv'='29396', 'num_nulls'='180053', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_sales_price set stats ('row_count'='720000376', 'ndv'='29288', 'num_nulls'='180005', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - 
alter table web_sales modify column ws_ship_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='179824', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_closed_date_sk set stats ('row_count'='1002', 'ndv'='163', 'num_nulls'='729', 'min_value'='2450820', 'max_value'='2451313', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_division_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='6', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_market_desc set stats ('row_count'='1002', 'ndv'='765', 'num_nulls'='0', 'min_value'='', 'max_value'='Yesterday left factors handle continuing co', 'data_size'='57638') - """ - - sql """ - alter table store modify column s_market_id set stats ('row_count'='1002', 'ndv'='10', 'num_nulls'='8', 'min_value'='1', 'max_value'='10', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_state set stats ('row_count'='1002', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='WV', 'data_size'='1994') - """ - - sql """ - alter table store modify column s_store_sk set stats ('row_count'='1002', 'ndv'='988', 'num_nulls'='0', 'min_value'='1', 'max_value'='1002', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_street_name set stats ('row_count'='1002', 'ndv'='549', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Oak', 'data_size'='8580') - """ - - sql """ - alter table web_page modify column wp_access_date_sk set stats ('row_count'='3000', 'ndv'='101', 'num_nulls'='31', 'min_value'='2452548', 'max_value'='2452648', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_char_count set stats ('row_count'='3000', 'ndv'='1883', 'num_nulls'='42', 'min_value'='303', 'max_value'='8523', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_addr_sk set stats 
('row_count'='287999764', 'ndv'='6015811', 'num_nulls'='10082311', 'min_value'='1', 'max_value'='6000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_return_time_sk set stats ('row_count'='287999764', 'ndv'='32660', 'num_nulls'='10082805', 'min_value'='28799', 'max_value'='61199', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_sk set stats ('row_count'='287999764', 'ndv'='499', 'num_nulls'='10081871', 'min_value'='1', 'max_value'='1000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_coupon_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sales_price set stats ('row_count'='2879987999', 'ndv'='19780', 'num_nulls'='129598061', 'min_value'='0.00', 'max_value'='200.00', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_country set stats ('row_count'='12000000', 'ndv'='211', 'num_nulls'='0', 'min_value'='', 'max_value'='ZIMBABWE', 'data_size'='100750845') - """ - - sql """ - alter table customer modify column c_birth_month set stats ('row_count'='12000000', 'ndv'='12', 'num_nulls'='419629', 'min_value'='1', 'max_value'='12', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_customer_sk set stats ('row_count'='12000000', 'ndv'='12157481', 'num_nulls'='0', 'min_value'='1', 'max_value'='12000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_email_address set stats ('row_count'='12000000', 'ndv'='11642077', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma.Young@aDhzZzCzYN.edu', 'data_size'='318077849') - """ - - sql """ - alter table customer modify column c_last_review_date_sk set stats ('row_count'='12000000', 'ndv'='366', 'num_nulls'='419900', 'min_value'='2452283', 
'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_preferred_cust_flag set stats ('row_count'='12000000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='11580510') - """ - - sql """ - alter table dbgen_version modify column dv_version set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='3.2.0', 'max_value'='3.2.0', 'data_size'='5') - """ - - sql """ - alter table customer_demographics modify column cd_purchase_estimate set stats ('row_count'='1920800', 'ndv'='20', 'num_nulls'='0', 'min_value'='500', 'max_value'='10000', 'data_size'='7683200') - """ - - sql """ - alter table reason modify column r_reason_id set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPDAAAAAA', 'data_size'='1040') - """ - - sql """ - alter table reason modify column r_reason_sk set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='1', 'max_value'='65', 'data_size'='520') - """ - - sql """ - alter table date_dim modify column d_current_week set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_first_dom set stats ('row_count'='73049', 'ndv'='2410', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2488070', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_fy_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_last_dom set stats ('row_count'='73049', 'ndv'='2419', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2488372', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_month_seq set stats ('row_count'='73049', 'ndv'='2398', 'num_nulls'='0', 'min_value'='0', 'max_value'='2400', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_quarter_name set stats ('row_count'='73049', 'ndv'='799', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='2100Q1', 'data_size'='438294') - """ - - sql """ - alter table warehouse modify column w_county set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='Bronx County', 'max_value'='Ziebach County', 'data_size'='291') - """ - - sql """ - alter table warehouse modify column w_street_number set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='0', 'min_value'='', 'max_value'='957', 'data_size'='54') - """ - - sql """ - alter table warehouse modify column w_warehouse_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Therefore urg', 'data_size'='307') - """ - - sql """ - alter table catalog_sales modify column cs_ext_discount_amt set stats ('row_count'='1439980416', 'ndv'='1100115', 'num_nulls'='7201054', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship_tax set stats ('row_count'='1439980416', 'ndv'='3312360', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46593.36', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_promo_sk set stats ('row_count'='1439980416', 'ndv'='1489', 'num_nulls'='7202844', 'min_value'='1', 'max_value'='1500', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_id set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='672') - """ - - sql """ - alter table call_center modify column cc_employees set stats ('row_count'='42', 'ndv'='30', 'num_nulls'='0', 'min_value'='69020', 'max_value'='6879074', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_suite_number set stats ('row_count'='42', 'ndv'='18', 'num_nulls'='0', 'min_value'='Suite 0', 
'max_value'='Suite W', 'data_size'='326') - """ - - sql """ - alter table catalog_returns modify column cr_item_sk set stats ('row_count'='143996756', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reason_sk set stats ('row_count'='143996756', 'ndv'='65', 'num_nulls'='2881950', 'min_value'='1', 'max_value'='65', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_ship_cost set stats ('row_count'='143996756', 'ndv'='483467', 'num_nulls'='2883436', 'min_value'='0.00', 'max_value'='14273.28', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_ship_mode_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2879879', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_store_credit set stats ('row_count'='143996756', 'ndv'='802237', 'num_nulls'='2880469', 'min_value'='0.00', 'max_value'='23215.15', 'data_size'='575987024') - """ - - sql """ - alter table customer_address modify column ca_city set stats ('row_count'='6000000', 'ndv'='977', 'num_nulls'='0', 'min_value'='', 'max_value'='Zion', 'data_size'='52096290') - """ - - sql """ - alter table customer_address modify column ca_state set stats ('row_count'='6000000', 'ndv'='52', 'num_nulls'='0', 'min_value'='', 'max_value'='WY', 'data_size'='11640128') - """ - - sql """ - alter table customer_address modify column ca_street_name set stats ('row_count'='6000000', 'ndv'='8173', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Woodland', 'data_size'='50697257') - """ - - sql """ - alter table customer_address modify column ca_street_type set stats ('row_count'='6000000', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='24441630') - """ - - sql """ - alter table catalog_page modify column cp_catalog_number set 
stats ('row_count'='30000', 'ndv'='109', 'num_nulls'='297', 'min_value'='1', 'max_value'='109', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_number set stats ('row_count'='30000', 'ndv'='279', 'num_nulls'='294', 'min_value'='1', 'max_value'='277', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_sk set stats ('row_count'='30000', 'ndv'='30439', 'num_nulls'='0', 'min_value'='1', 'max_value'='30000', 'data_size'='240000') - """ - - sql """ - alter table catalog_page modify column cp_start_date_sk set stats ('row_count'='30000', 'ndv'='91', 'num_nulls'='286', 'min_value'='2450815', 'max_value'='2453005', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_rec_start_date set stats ('row_count'='300000', 'ndv'='4', 'num_nulls'='784', 'min_value'='1997-10-27', 'max_value'='2001-10-27', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_units set stats ('row_count'='300000', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='1253652') - """ - - sql """ - alter table web_returns modify column wr_refunded_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238545', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_ship_cost set stats ('row_count'='71997522', 'ndv'='451263', 'num_nulls'='3239048', 'min_value'='0.00', 'max_value'='14352.10', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returned_date_sk set stats ('row_count'='71997522', 'ndv'='2188', 'num_nulls'='3239259', 'min_value'='2450819', 'max_value'='2453002', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3239192', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - 
""" - - sql """ - alter table web_site modify column web_suite_number set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Suite 100', 'max_value'='Suite Y', 'data_size'='430') - """ - - sql """ - alter table promotion modify column p_start_date_sk set stats ('row_count'='1500', 'ndv'='685', 'num_nulls'='23', 'min_value'='2450096', 'max_value'='2450915', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_coupon_amt set stats ('row_count'='720000376', 'ndv'='1505315', 'num_nulls'='179933', 'min_value'='0.00', 'max_value'='28824.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_wholesale_cost set stats ('row_count'='720000376', 'ndv'='393180', 'num_nulls'='180060', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship set stats ('row_count'='720000376', 'ndv'='2414838', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='44263.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_date_sk set stats ('row_count'='720000376', 'ndv'='1952', 'num_nulls'='180011', 'min_value'='2450817', 'max_value'='2452762', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_page_sk set stats ('row_count'='720000376', 'ndv'='2984', 'num_nulls'='179732', 'min_value'='1', 'max_value'='3000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_country set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='12961') - """ - - sql """ - alter table store modify column s_store_name set stats ('row_count'='1002', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='3916') - """ - - sql """ - alter table time_dim modify column t_second set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 
'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_sub_shift set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='afternoon', 'max_value'='night', 'data_size'='597600') - """ - - sql """ - alter table web_page modify column wp_image_count set stats ('row_count'='3000', 'ndv'='7', 'num_nulls'='26', 'min_value'='1', 'max_value'='7', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_type set stats ('row_count'='3000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='welcome', 'data_size'='18867') - """ - - sql """ - alter table store_returns modify column sr_customer_sk set stats ('row_count'='287999764', 'ndv'='12157481', 'num_nulls'='10081624', 'min_value'='1', 'max_value'='12000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_hdemo_sk set stats ('row_count'='287999764', 'ndv'='7251', 'num_nulls'='10083275', 'min_value'='1', 'max_value'='7200', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_addr_sk set stats ('row_count'='2879987999', 'ndv'='6015811', 'num_nulls'='129589799', 'min_value'='1', 'max_value'='6000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_item_sk set stats ('row_count'='2879987999', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_quantity set stats ('row_count'='2879987999', 'ndv'='100', 'num_nulls'='129584258', 'min_value'='1', 'max_value'='100', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ticket_number set stats ('row_count'='2879987999', 'ndv'='238830448', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='9905', 
'num_nulls'='129590273', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='11519951996') - """ - - sql """ - alter table ship_mode modify column sm_type set stats ('row_count'='20', 'ndv'='6', 'num_nulls'='0', 'min_value'='EXPRESS', 'max_value'='TWO DAY', 'data_size'='150') - """ - - sql """ - alter table customer modify column c_current_addr_sk set stats ('row_count'='12000000', 'ndv'='5243359', 'num_nulls'='0', 'min_value'='3', 'max_value'='6000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_last_name set stats ('row_count'='12000000', 'ndv'='4990', 'num_nulls'='0', 'min_value'='', 'max_value'='Zuniga', 'data_size'='70991730') - """ - - sql """ - alter table dbgen_version modify column dv_cmdline_args set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'max_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'data_size'='86') - """ - - sql """ - alter table date_dim modify column d_current_quarter set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date_sk set stats ('row_count'='73049', 'ndv'='73042', 'num_nulls'='0', 'min_value'='2415022', 'max_value'='2488070', 'data_size'='584392') - """ - - sql """ - alter table date_dim modify column d_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_country set stats ('row_count'='20', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='260') - """ - - sql """ - alter table warehouse modify column w_state set stats ('row_count'='20', 'ndv'='13', 'num_nulls'='0', 'min_value'='AL', 'max_value'='TN', 'data_size'='40') - """ - - sql """ - alter 
table catalog_sales modify column cs_bill_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7199539', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201919', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid set stats ('row_count'='1439980416', 'ndv'='1809875', 'num_nulls'='7197668', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7198232', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_mode_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7201083', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_date_sk set stats ('row_count'='1439980416', 'ndv'='1835', 'num_nulls'='7203326', 'min_value'='2450815', 'max_value'='2452654', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_time_sk set stats ('row_count'='1439980416', 'ndv'='87677', 'num_nulls'='7201329', 'min_value'='0', 'max_value'='86399', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='9905', 'num_nulls'='7201098', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_company_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='160') - """ - - sql """ - alter table call_center modify column cc_market_manager set 
stats ('row_count'='42', 'ndv'='35', 'num_nulls'='0', 'min_value'='Cesar Allen', 'max_value'='William Larsen', 'data_size'='524') - """ - - sql """ - alter table call_center modify column cc_mkt_id set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_street_type set stats ('row_count'='42', 'ndv'='11', 'num_nulls'='0', 'min_value'='Avenue', 'max_value'='Way', 'data_size'='184') - """ - - sql """ - alter table catalog_returns modify column cr_return_tax set stats ('row_count'='143996756', 'ndv'='149828', 'num_nulls'='2881611', 'min_value'='0.00', 'max_value'='2511.58', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2880543', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882692', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reversed_charge set stats ('row_count'='143996756', 'ndv'='802509', 'num_nulls'='2881215', 'min_value'='0.00', 'max_value'='24033.84', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_warehouse_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2882192', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_demo_sk set stats ('row_count'='7200', 'ndv'='7251', 'num_nulls'='0', 'min_value'='1', 'max_value'='7200', 'data_size'='57600') - """ - - sql """ - alter table household_demographics modify column hd_vehicle_count set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='-1', 'max_value'='4', 'data_size'='28800') 
- """ - - sql """ - alter table customer_address modify column ca_zip set stats ('row_count'='6000000', 'ndv'='9253', 'num_nulls'='0', 'min_value'='', 'max_value'='99981', 'data_size'='29097610') - """ - - sql """ - alter table income_band modify column ib_income_band_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table catalog_page modify column cp_type set stats ('row_count'='30000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='quarterly', 'data_size'='227890') - """ - - sql """ - alter table item modify column i_brand set stats ('row_count'='300000', 'ndv'='714', 'num_nulls'='0', 'min_value'='', 'max_value'='univunivamalg #9', 'data_size'='4834917') - """ - - sql """ - alter table item modify column i_formulation set stats ('row_count'='300000', 'ndv'='224757', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow98911509228741', 'data_size'='5984460') - """ - - sql """ - alter table item modify column i_item_desc set stats ('row_count'='300000', 'ndv'='217721', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters used to save quite colour', 'data_size'='30093342') - """ - - sql """ - alter table web_returns modify column wr_fee set stats ('row_count'='71997522', 'ndv'='9958', 'num_nulls'='3238926', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_item_sk set stats ('row_count'='71997522', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_reason_sk set stats ('row_count'='71997522', 'ndv'='65', 'num_nulls'='3238897', 'min_value'='1', 'max_value'='65', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_customer_sk set stats ('row_count'='71997522', 'ndv'='12117831', 'num_nulls'='3242433', 'min_value'='1', 'max_value'='12000000', 
'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_city set stats ('row_count'='54', 'ndv'='31', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='491') - """ - - sql """ - alter table web_site modify column web_close_date_sk set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='10', 'min_value'='2441265', 'max_value'='2446218', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_company_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_company_name set stats ('row_count'='54', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='203') - """ - - sql """ - alter table web_site modify column web_county set stats ('row_count'='54', 'ndv'='25', 'num_nulls'='0', 'min_value'='', 'max_value'='Williamson County', 'data_size'='762') - """ - - sql """ - alter table web_site modify column web_name set stats ('row_count'='54', 'ndv'='10', 'num_nulls'='0', 'min_value'='', 'max_value'='site_8', 'data_size'='312') - """ - - sql """ - alter table web_site modify column web_open_date_sk set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='1', 'min_value'='2450373', 'max_value'='2450807', 'data_size'='432') - """ - - sql """ - alter table promotion modify column p_channel_dmail set stats ('row_count'='1500', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='1483') - """ - - sql """ - alter table promotion modify column p_channel_press set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_channel_radio set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_cost set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='18', 'min_value'='1000.00', 'max_value'='1000.00', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_ext_tax set stats ('row_count'='720000376', 'ndv'='211413', 'num_nulls'='179695', 'min_value'='0.00', 'max_value'='2682.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_item_sk set stats ('row_count'='720000376', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_net_paid set stats ('row_count'='720000376', 'ndv'='1749360', 'num_nulls'='179970', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship_tax set stats ('row_count'='720000376', 'ndv'='3224829', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46004.19', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_tax set stats ('row_count'='720000376', 'ndv'='2354996', 'num_nulls'='179972', 'min_value'='0.00', 'max_value'='32492.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_order_number set stats ('row_count'='720000376', 'ndv'='60401176', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_quantity set stats ('row_count'='720000376', 'ndv'='100', 'num_nulls'='179781', 'min_value'='1', 'max_value'='100', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='180290', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_time_sk set stats ('row_count'='720000376', 'ndv'='87677', 'num_nulls'='179980', 'min_value'='0', 'max_value'='86399', 'data_size'='5760003008') - """ - - sql 
""" - alter table store modify column s_street_type set stats ('row_count'='1002', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='4189') - """ - - sql """ - alter table web_page modify column wp_autogen_flag set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='2962') - """ - - sql """ - alter table web_page modify column wp_rec_start_date set stats ('row_count'='3000', 'ndv'='4', 'num_nulls'='29', 'min_value'='1997-09-03', 'max_value'='2001-09-03', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_net_loss set stats ('row_count'='287999764', 'ndv'='714210', 'num_nulls'='10080716', 'min_value'='0.50', 'max_value'='10776.08', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_amt_inc_tax set stats ('row_count'='287999764', 'ndv'='1259368', 'num_nulls'='10076879', 'min_value'='0.00', 'max_value'='20454.63', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_quantity set stats ('row_count'='287999764', 'ndv'='100', 'num_nulls'='10082815', 'min_value'='1', 'max_value'='100', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_ship_cost set stats ('row_count'='287999764', 'ndv'='355844', 'num_nulls'='10081927', 'min_value'='0.00', 'max_value'='9767.34', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reversed_charge set stats ('row_count'='287999764', 'ndv'='700618', 'num_nulls'='10085976', 'min_value'='0.00', 'max_value'='17339.42', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify column ss_net_paid_inc_tax set stats ('row_count'='2879987999', 'ndv'='1681767', 'num_nulls'='129609050', 'min_value'='0.00', 'max_value'='21769.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_day set stats ('row_count'='12000000', 
'ndv'='31', 'num_nulls'='420361', 'min_value'='1', 'max_value'='31', 'data_size'='48000000') - """ - - sql """ - alter table customer_demographics modify column cd_credit_rating set stats ('row_count'='1920800', 'ndv'='4', 'num_nulls'='0', 'min_value'='Good', 'max_value'='Unknown', 'data_size'='13445600') - """ - - sql """ - alter table customer_demographics modify column cd_demo_sk set stats ('row_count'='1920800', 'ndv'='1916366', 'num_nulls'='0', 'min_value'='1', 'max_value'='1920800', 'data_size'='15366400') - """ - - sql """ - alter table customer_demographics modify column cd_dep_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table customer_demographics modify column cd_education_status set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='2 yr Degree', 'max_value'='Unknown', 'data_size'='18384800') - """ - - sql """ - alter table customer_demographics modify column cd_gender set stats ('row_count'='1920800', 'ndv'='2', 'num_nulls'='0', 'min_value'='F', 'max_value'='M', 'data_size'='1920800') - """ - - sql """ - alter table customer_demographics modify column cd_marital_status set stats ('row_count'='1920800', 'ndv'='5', 'num_nulls'='0', 'min_value'='D', 'max_value'='W', 'data_size'='1920800') - """ - - sql """ - alter table date_dim modify column d_date_id set stats ('row_count'='73049', 'ndv'='72907', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='1168784') - """ - - sql """ - alter table date_dim modify column d_fy_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column 
w_warehouse_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table catalog_sales modify column cs_ext_list_price set stats ('row_count'='1439980416', 'ndv'='1160303', 'num_nulls'='7199542', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_tax set stats ('row_count'='1439980416', 'ndv'='215267', 'num_nulls'='7200412', 'min_value'='0.00', 'max_value'='2673.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_quantity set stats ('row_count'='1439980416', 'ndv'='100', 'num_nulls'='7202885', 'min_value'='1', 'max_value'='100', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7200151', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201507', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_company set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_mkt_desc set stats ('row_count'='42', 'ndv'='33', 'num_nulls'='0', 'min_value'='Arms increase controversial, present so', 'max_value'='Young tests could buy comfortable, local users; o', 'data_size'='2419') - """ - - sql """ - alter table call_center modify column cc_open_date_sk set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='2450794', 'max_value'='2451146', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_rec_end_date set stats ('row_count'='42', 
'ndv'='3', 'num_nulls'='21', 'min_value'='2000-01-01', 'max_value'='2001-12-31', 'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_order_number set stats ('row_count'='143996756', 'ndv'='93476424', 'num_nulls'='0', 'min_value'='2', 'max_value'='160000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amount set stats ('row_count'='143996756', 'ndv'='882831', 'num_nulls'='2880424', 'min_value'='0.00', 'max_value'='28805.04', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_date_sk set stats ('row_count'='143996756', 'ndv'='2108', 'num_nulls'='0', 'min_value'='2450821', 'max_value'='2452924', 'data_size'='1151974048') - """ - - sql """ - alter table income_band modify column ib_upper_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='10000', 'max_value'='200000', 'data_size'='80') - """ - - sql """ - alter table catalog_page modify column cp_department set stats ('row_count'='30000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='DEPARTMENT', 'data_size'='297110') - """ - - sql """ - alter table catalog_page modify column cp_end_date_sk set stats ('row_count'='30000', 'ndv'='97', 'num_nulls'='302', 'min_value'='2450844', 'max_value'='2453186', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_brand_id set stats ('row_count'='300000', 'ndv'='951', 'num_nulls'='763', 'min_value'='1001001', 'max_value'='10016017', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_category set stats ('row_count'='300000', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='Women', 'data_size'='1766742') - """ - - sql """ - alter table item modify column i_class_id set stats ('row_count'='300000', 'ndv'='16', 'num_nulls'='722', 'min_value'='1', 'max_value'='16', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_item_sk set stats 
('row_count'='300000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2400000') - """ - - sql """ - alter table item modify column i_manufact_id set stats ('row_count'='300000', 'ndv'='1005', 'num_nulls'='761', 'min_value'='1', 'max_value'='1000', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_wholesale_cost set stats ('row_count'='300000', 'ndv'='7243', 'num_nulls'='740', 'min_value'='0.02', 'max_value'='89.49', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3240352', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_tax set stats ('row_count'='71997522', 'ndv'='137392', 'num_nulls'='3237729', 'min_value'='0.00', 'max_value'='2551.16', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238239', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_web_page_sk set stats ('row_count'='71997522', 'ndv'='2984', 'num_nulls'='3240387', 'min_value'='1', 'max_value'='3000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_class set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='371') - """ - - sql """ - alter table web_site modify column web_zip set stats ('row_count'='54', 'ndv'='32', 'num_nulls'='0', 'min_value'='14593', 'max_value'='99431', 'data_size'='270') - """ - - sql """ - alter table promotion modify column p_channel_email set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1480') - """ - - sql """ - alter table promotion modify column p_item_sk set stats 
('row_count'='1500', 'ndv'='1467', 'num_nulls'='19', 'min_value'='184', 'max_value'='299990', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_promo_name set stats ('row_count'='1500', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='5896') - """ - - sql """ - alter table web_sales modify column ws_ext_discount_amt set stats ('row_count'='720000376', 'ndv'='1093513', 'num_nulls'='179851', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_list_price set stats ('row_count'='720000376', 'ndv'='1160303', 'num_nulls'='179866', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_wholesale_cost set stats ('row_count'='720000376', 'ndv'='9905', 'num_nulls'='179834', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='2880001504') - """ - - sql """ - alter table store modify column s_market_manager set stats ('row_count'='1002', 'ndv'='732', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Perez', 'data_size'='12823') - """ - - sql """ - alter table store modify column s_number_employees set stats ('row_count'='1002', 'ndv'='101', 'num_nulls'='8', 'min_value'='200', 'max_value'='300', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_end_date set stats ('row_count'='1002', 'ndv'='3', 'num_nulls'='501', 'min_value'='1999-03-13', 'max_value'='2001-03-12', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_start_date set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='7', 'min_value'='1997-03-13', 'max_value'='2001-03-13', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_suite_number set stats ('row_count'='1002', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='7866') - """ - - sql """ - alter table time_dim modify column t_hour set stats 
('row_count'='86400', 'ndv'='24', 'num_nulls'='0', 'min_value'='0', 'max_value'='23', 'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_shift set stats ('row_count'='86400', 'ndv'='3', 'num_nulls'='0', 'min_value'='first', 'max_value'='third', 'data_size'='460800') - """ - - sql """ - alter table web_page modify column wp_link_count set stats ('row_count'='3000', 'ndv'='24', 'num_nulls'='27', 'min_value'='2', 'max_value'='25', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_rec_end_date set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='1500', 'min_value'='1999-09-03', 'max_value'='2001-09-02', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_cdemo_sk set stats ('row_count'='287999764', 'ndv'='1916366', 'num_nulls'='10076902', 'min_value'='1', 'max_value'='1920800', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_item_sk set stats ('row_count'='287999764', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_cdemo_sk set stats ('row_count'='2879987999', 'ndv'='1916366', 'num_nulls'='129602155', 'min_value'='1', 'max_value'='1920800', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_ext_discount_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='393180', 'num_nulls'='129595018', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_list_price set stats ('row_count'='2879987999', 'ndv'='19640', 'num_nulls'='129597020', 'min_value'='1.00', 'max_value'='200.00', 'data_size'='11519951996') - 
""" - - sql """ - alter table store_sales modify column ss_net_paid set stats ('row_count'='2879987999', 'ndv'='1288646', 'num_nulls'='129599407', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sold_date_sk set stats ('row_count'='2879987999', 'ndv'='1820', 'num_nulls'='129600843', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_sold_time_sk set stats ('row_count'='2879987999', 'ndv'='47252', 'num_nulls'='129593012', 'min_value'='28800', 'max_value'='75599', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_carrier set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AIRBORNE', 'max_value'='ZOUROS', 'data_size'='133') - """ - - sql """ - alter table customer modify column c_birth_year set stats ('row_count'='12000000', 'ndv'='69', 'num_nulls'='419584', 'min_value'='1924', 'max_value'='1992', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_login set stats ('row_count'='12000000', 'ndv'='1', 'num_nulls'='0', 'min_value'='', 'max_value'='', 'data_size'='0') - """ - - sql """ - alter table customer modify column c_salutation set stats ('row_count'='12000000', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='Sir', 'data_size'='37544445') - """ - - sql """ - alter table reason modify column r_reason_desc set stats ('row_count'='65', 'ndv'='64', 'num_nulls'='0', 'min_value'='Did not fit', 'max_value'='unauthoized purchase', 'data_size'='848') - """ - - sql """ - alter table date_dim modify column d_current_year set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_dom set stats ('row_count'='73049', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_same_day_lq set stats ('row_count'='73049', 'ndv'='72231', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2487978', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_weekend set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_zip set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='19231', 'max_value'='89275', 'data_size'='100') - """ - - sql """ - alter table catalog_sales modify column cs_catalog_page_sk set stats ('row_count'='1439980416', 'ndv'='17005', 'num_nulls'='7199032', 'min_value'='1', 'max_value'='25207', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_coupon_amt set stats ('row_count'='1439980416', 'ndv'='1578778', 'num_nulls'='7198116', 'min_value'='0.00', 'max_value'='28730.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_list_price set stats ('row_count'='1439980416', 'ndv'='29396', 'num_nulls'='7201549', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_profit set stats ('row_count'='1439980416', 'ndv'='2058398', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19962.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_order_number set stats ('row_count'='1439980416', 'ndv'='159051824', 'num_nulls'='0', 'min_value'='1', 'max_value'='160000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7201542', 
'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_sk set stats ('row_count'='42', 'ndv'='42', 'num_nulls'='0', 'min_value'='1', 'max_value'='42', 'data_size'='336') - """ - - sql """ - alter table call_center modify column cc_city set stats ('row_count'='42', 'ndv'='17', 'num_nulls'='0', 'min_value'='Antioch', 'max_value'='Spring Hill', 'data_size'='386') - """ - - sql """ - alter table call_center modify column cc_closed_date_sk set stats ('row_count'='42', 'ndv'='0', 'num_nulls'='42', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_gmt_offset set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_hours set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='8AM-12AM', 'max_value'='8AM-8AM', 'data_size'='300') - """ - - sql """ - alter table call_center modify column cc_street_number set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='38', 'max_value'='999', 'data_size'='120') - """ - - sql """ - alter table call_center modify column cc_tax_percentage set stats ('row_count'='42', 'ndv'='12', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='168') - """ - - sql """ - alter table inventory modify column inv_date_sk set stats ('row_count'='783000000', 'ndv'='261', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2452635', 'data_size'='6264000000') - """ - - sql """ - alter table inventory modify column inv_item_sk set stats ('row_count'='783000000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_fee set stats ('row_count'='143996756', 'ndv'='9958', 'num_nulls'='2882168', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='575987024') - """ - - sql """ - 
alter table catalog_returns modify column cr_return_quantity set stats ('row_count'='143996756', 'ndv'='100', 'num_nulls'='2878774', 'min_value'='1', 'max_value'='100', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_time_sk set stats ('row_count'='143996756', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_dep_count set stats ('row_count'='7200', 'ndv'='10', 'num_nulls'='0', 'min_value'='0', 'max_value'='9', 'data_size'='28800') - """ - - sql """ - alter table customer_address modify column ca_county set stats ('row_count'='6000000', 'ndv'='1825', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='81254984') - """ - - sql """ - alter table income_band modify column ib_lower_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='0', 'max_value'='190001', 'data_size'='80') - """ - - sql """ - alter table item modify column i_category_id set stats ('row_count'='300000', 'ndv'='10', 'num_nulls'='766', 'min_value'='1', 'max_value'='10', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_class set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='0', 'min_value'='', 'max_value'='womens watch', 'data_size'='2331199') - """ - - sql """ - alter table item modify column i_container set stats ('row_count'='300000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='2094652') - """ - - sql """ - alter table item modify column i_current_price set stats ('row_count'='300000', 'ndv'='9685', 'num_nulls'='775', 'min_value'='0.09', 'max_value'='99.99', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_manager_id set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='744', 'min_value'='1', 'max_value'='100', 'data_size'='1200000') - """ - - sql """ - alter table item modify column 
i_size set stats ('row_count'='300000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='small', 'data_size'='1296134') - """ - - sql """ - alter table web_returns modify column wr_order_number set stats ('row_count'='71997522', 'ndv'='42383708', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_cash set stats ('row_count'='71997522', 'ndv'='955369', 'num_nulls'='3240493', 'min_value'='0.00', 'max_value'='26992.92', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_country set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='689') - """ - - sql """ - alter table web_site modify column web_gmt_offset set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='1', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_market_manager set stats ('row_count'='54', 'ndv'='46', 'num_nulls'='0', 'min_value'='', 'max_value'='Zachery Oneil', 'data_size'='691') - """ - - sql """ - alter table web_site modify column web_site_sk set stats ('row_count'='54', 'ndv'='54', 'num_nulls'='0', 'min_value'='1', 'max_value'='54', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_street_name set stats ('row_count'='54', 'ndv'='53', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Ridge', 'data_size'='471') - """ - - sql """ - alter table web_site modify column web_tax_percentage set stats ('row_count'='54', 'ndv'='13', 'num_nulls'='1', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='216') - """ - - sql """ - alter table promotion modify column p_channel_tv set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_response_targe set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='27', 'min_value'='1', 'max_value'='1', 'data_size'='6000') - """ - - sql """ - alter table web_sales modify column ws_bill_addr_sk set stats ('row_count'='720000376', 'ndv'='6015742', 'num_nulls'='179648', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_sales_price set stats ('row_count'='720000376', 'ndv'='1091003', 'num_nulls'='180023', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_profit set stats ('row_count'='720000376', 'ndv'='2014057', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19840.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_promo_sk set stats ('row_count'='720000376', 'ndv'='1489', 'num_nulls'='180016', 'min_value'='1', 'max_value'='1500', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_customer_sk set stats ('row_count'='720000376', 'ndv'='12074547', 'num_nulls'='179966', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_division_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_floor_space set stats ('row_count'='1002', 'ndv'='752', 'num_nulls'='6', 'min_value'='5002549', 'max_value'='9997773', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_tax_precentage set stats ('row_count'='1002', 'ndv'='12', 'num_nulls'='8', 'min_value'='0.00', 'max_value'='0.11', 'data_size'='4008') - """ - - sql """ - alter table time_dim modify column t_time_id set stats ('row_count'='86400', 'ndv'='85663', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPAAAA', 'data_size'='1382400') - """ - - sql """ - alter table time_dim modify column t_time_sk set 
stats ('row_count'='86400', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='691200') - """ - - sql """ - alter table store_returns modify column sr_fee set stats ('row_count'='287999764', 'ndv'='9958', 'num_nulls'='10081860', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reason_sk set stats ('row_count'='287999764', 'ndv'='65', 'num_nulls'='10087936', 'min_value'='1', 'max_value'='65', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_credit set stats ('row_count'='287999764', 'ndv'='698161', 'num_nulls'='10077188', 'min_value'='0.00', 'max_value'='17792.48', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_ticket_number set stats ('row_count'='287999764', 'ndv'='168770768', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_list_price set stats ('row_count'='2879987999', 'ndv'='770971', 'num_nulls'='129593800', 'min_value'='1.00', 'max_value'='20000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_sales_price set stats ('row_count'='2879987999', 'ndv'='754248', 'num_nulls'='129589177', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_net_profit set stats ('row_count'='2879987999', 'ndv'='1497362', 'num_nulls'='129572933', 'min_value'='-10000.00', 'max_value'='9986.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_promo_sk set stats ('row_count'='2879987999', 'ndv'='1489', 'num_nulls'='129597096', 'min_value'='1', 'max_value'='1500', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_code set stats ('row_count'='20', 'ndv'='4', 'num_nulls'='0', 
'min_value'='AIR', 'max_value'='SURFACE', 'data_size'='87') - """ - - sql """ - alter table ship_mode modify column sm_contract set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='2mM8l', 'max_value'='yVfotg7Tio3MVhBg6Bkn', 'data_size'='252') - """ - - sql """ - alter table customer modify column c_current_hdemo_sk set stats ('row_count'='12000000', 'ndv'='7251', 'num_nulls'='418736', 'min_value'='1', 'max_value'='7200', 'data_size'='96000000') - """ - - sql """ - alter table dbgen_version modify column dv_create_date set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2023-07-06', 'max_value'='2023-07-06', 'data_size'='4') - """ - - sql """ - alter table dbgen_version modify column dv_create_time set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2017-05-13 00:00:00', 'max_value'='2017-05-13 00:00:00', 'data_size'='8') - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy deleted file mode 100644 index 52a88c9c294b1b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 ''' - explain shape plan - with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and 
s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy deleted file mode 100644 index edff37bb8d673a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Fairfield County','Campbell County','Washtenaw County','Escambia County','Cleburne County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 ANd 3+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by 
cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100""" - qt_ds_shape_10 ''' - explain shape plan - select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Fairfield County','Campbell County','Washtenaw County','Escambia County','Cleburne County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 ANd 3+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy deleted file mode 100644 index 5b659ebd3d24ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy +++ /dev/null @@ -1,199 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = 
ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1998 - and t_s_secyear.dyear = 1998+1 - and t_w_firstyear.dyear = 1998 - and t_w_secyear.dyear = 1998+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address -limit 100""" - qt_ds_shape_11 ''' - 
explain shape plan - with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1998 - and t_s_secyear.dyear = 1998+1 - and t_w_firstyear.dyear = 1998 - and t_w_secyear.dyear = 1998+1 - 
and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy deleted file mode 100644 index 90f49e49c24c7e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Men', 'Books', 'Electronics') - and ws_sold_date_sk = d_date_sk - and d_date between cast('2001-06-15' as date) - and (cast('2001-06-15' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_12 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Men', 'Books', 'Electronics') - and ws_sold_date_sk = d_date_sk - and d_date between cast('2001-06-15' as date) - and (cast('2001-06-15' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price 
-order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy deleted file mode 100644 index 64da9d0bcd9de2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country 
= 'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 ''' - explain shape plan - select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy deleted file mode 100644 index 07ef2b99688004..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 1999 AND 1999 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 1999 AND 1999 + 2 - 
intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 1999 AND 1999 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), -avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, 
i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100""" - qt_ds_shape_14 ''' - explain shape plan - with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 1999 AND 1999 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 1999 AND 1999 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 1999 AND 1999 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), -avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - 
,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy deleted file mode 100644 index 05d8d44bcdaddc..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or 
cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100""" - qt_ds_shape_15 ''' - explain shape plan - select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy deleted file mode 100644 index 23e94ad465c6b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'PA' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100""" - qt_ds_shape_16 ''' - explain shape plan - select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and 
cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'PA' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy deleted file mode 100644 index 6bde5e8c30e2d2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = 
cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100""" - qt_ds_shape_17 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy deleted file mode 100644 index 
1b3ee9b374b321..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, 
- avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Primary' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (1,3,7,11,10,4) and - d_year = 2001 and - ca_state in ('AL','MO','TN' - ,'GA','MT','IN','CA') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100""" - qt_ds_shape_18 ''' - explain shape plan - select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Primary' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (1,3,7,11,10,4) and - d_year = 2001 and - ca_state in ('AL','MO','TN' - ,'GA','MT','IN','CA') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy deleted file mode 100644 index 43a9dbdd37a710..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 - ''' -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy deleted file mode 100644 index e6a85af6b5c792..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales 
sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1""" - qt_ds_shape_2 ''' - explain shape plan - with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales 
wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy deleted file mode 100644 index e132b59a3806b7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Books', 'Music', 'Sports') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-06-18' as date) - and (cast('2002-06-18' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_20 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Books', 'Music', 'Sports') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-06-18' as date) - and (cast('2002-06-18' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by 
i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy deleted file mode 100644 index 7f079f8806f79c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('1999-06-22' as date) - interval 30 day) - and (cast ('1999-06-22' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100""" - qt_ds_shape_21 ''' - explain shape plan - select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - 
from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('1999-06-22' as date) - interval 30 day) - and (cast ('1999-06-22' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy deleted file mode 100644 index 3eea3092249ead..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1200 and 1200 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100""" - qt_ds_shape_22 ''' - explain shape plan - select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1200 and 1200 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy deleted file mode 100644 index 28268f8fcbcdf9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - 
where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), -best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 ''' - explain shape plan - with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), -best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - 
max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy deleted file mode 100644 index b64674097d7b86..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 ''' - explain shape plan - with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns 
- ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy deleted file mode 100644 index e206d6f27dd536..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_net_profit) as store_sales_profit - ,max(sr_net_loss) as store_returns_loss - ,max(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 1999 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_25 ''' - explain shape plan - select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_net_profit) as store_sales_profit - ,max(sr_net_loss) as store_returns_loss - ,max(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - 
,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 1999 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy deleted file mode 100644 index 2f2a1f1ca16a4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2002 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_26 ''' - explain shape plan - select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2002 - group by i_item_id - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy deleted file mode 100644 index 6bdbc27bc295c0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('TN','TN', 'TN', 'TN', 'TN', 'TN') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100""" - qt_ds_shape_27 ''' - explain shape plan - select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('TN','TN', 'TN', 'TN', 'TN', 'TN') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100 - ''' -} diff 
--git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy deleted file mode 100644 index e7f8ae691933b6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 107 and 107+10 - or ss_coupon_amt between 1319 and 1319+1000 - or ss_wholesale_cost between 60 and 60+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 23 and 23+10 - or ss_coupon_amt between 825 and 825+1000 - or ss_wholesale_cost between 43 and 43+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 74 and 74+10 - or ss_coupon_amt between 4381 and 4381+1000 - or ss_wholesale_cost between 57 and 57+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 3117 and 3117+1000 - or ss_wholesale_cost between 68 and 68+20)) B4, - (select avg(ss_list_price) B5_LP - 
,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 58 and 58+10 - or ss_coupon_amt between 9402 and 9402+1000 - or ss_wholesale_cost between 38 and 38+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 64 and 64+10 - or ss_coupon_amt between 5792 and 5792+1000 - or ss_wholesale_cost between 73 and 73+20)) B6 -limit 100""" - qt_ds_shape_28 ''' - explain shape plan - select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 107 and 107+10 - or ss_coupon_amt between 1319 and 1319+1000 - or ss_wholesale_cost between 60 and 60+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 23 and 23+10 - or ss_coupon_amt between 825 and 825+1000 - or ss_wholesale_cost between 43 and 43+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 74 and 74+10 - or ss_coupon_amt between 4381 and 4381+1000 - or ss_wholesale_cost between 57 and 57+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 3117 and 3117+1000 - or ss_wholesale_cost between 68 and 68+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 
and 25 - and (ss_list_price between 58 and 58+10 - or ss_coupon_amt between 9402 and 9402+1000 - or ss_wholesale_cost between 38 and 38+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 64 and 64+10 - or ss_coupon_amt between 5792 and 5792+1000 - or ss_wholesale_cost between 73 and 73+20)) B6 -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy deleted file mode 100644 index bc0a11be1f4227..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_quantity) as store_sales_quantity - ,max(sr_return_quantity) as store_returns_quantity - ,max(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1998 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1998 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1998,1998+1,1998+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_29 ''' - explain shape plan - select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_quantity) as store_sales_quantity - ,max(sr_return_quantity) as store_returns_quantity - ,max(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns 
- ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1998 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1998 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1998,1998+1,1998+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy deleted file mode 100644 index c396e81a258154..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100""" - qt_ds_shape_3 ''' - explain shape plan - select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy deleted file mode 100644 index 3677b56c442266..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2000 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - 
,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'AR' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100""" - qt_ds_shape_30 ''' - explain shape plan - with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2000 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'AR' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy deleted file mode 100644 index faca7bb52750e0..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from 
web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 1999 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 1999 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 1999 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 1999 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 1999 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =1999 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by store_q2_q3_increase""" - qt_ds_shape_31 ''' - explain shape plan - with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - 
,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 1999 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 1999 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 1999 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 1999 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 1999 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =1999 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by store_q2_q3_increase - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy deleted file mode 100644 index 7f6ed9a0c5a721..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 722 -and i_item_sk = cs_item_sk -and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100""" - qt_ds_shape_32 ''' - explain shape plan - select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 722 -and i_item_sk = cs_item_sk -and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) - 
and d_date_sk = cs_sold_date_sk - ) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy deleted file mode 100644 index 8e1ab9f66da27e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select 
i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100""" - qt_ds_shape_33 ''' - explain shape plan - with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy deleted file mode 100644 index 98a2d27c001b41..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and 
store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County','Williamson County','Williamson County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number""" - qt_ds_shape_34 ''' - explain shape plan - select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County','Williamson County','Williamson 
County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy deleted file mode 100644 index 3d68f9a85ac51c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - avg(cd_dep_count), - stddev_samp(cd_dep_count), - sum(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - avg(cd_dep_employed_count), - stddev_samp(cd_dep_employed_count), - sum(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - avg(cd_dep_college_count), - stddev_samp(cd_dep_college_count), - sum(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - 
cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100""" - qt_ds_shape_35 ''' - explain shape plan - select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - avg(cd_dep_count), - stddev_samp(cd_dep_count), - sum(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - avg(cd_dep_employed_count), - stddev_samp(cd_dep_employed_count), - sum(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - avg(cd_dep_college_count), - stddev_samp(cd_dep_college_count), - sum(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy deleted file mode 100644 index 7a681f2906089a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in 
('TN','TN','TN','TN', - 'TN','TN','TN','TN') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100""" - qt_ds_shape_36 ''' - explain shape plan - select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('TN','TN','TN','TN', - 'TN','TN','TN','TN') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy deleted file mode 100644 index 5d41320124388e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 29 and 29 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2002-03-29' as date) and (cast('2002-03-29' as date) + interval 60 day) - and i_manufact_id in (705,742,777,944) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_37 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 29 and 29 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2002-03-29' as date) and (cast('2002-03-29' as date) + 
interval 60 day) - and i_manufact_id in (705,742,777,944) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy deleted file mode 100644 index d758d7a43561a7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 -) hot_cust -limit 100""" - qt_ds_shape_38 ''' - explain shape plan - select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, 
d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 -) hot_cust -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy deleted file mode 100644 index 5392d2b82f8c21..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =2000 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov""" - qt_ds_shape_39 ''' - explain shape plan - with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) 
stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =2000 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy deleted file mode 100644 index 24689dda916653..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - 
,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / 
t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_4 ''' - explain shape plan - with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - 
,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when 
t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy deleted file mode 100644 index ac7a71d3233856..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-05-02' as date) - interval 30 day) - and (cast ('2001-05-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100""" - qt_ds_shape_40 ''' - explain shape plan - select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join 
catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-05-02' as date) - interval 30 day) - and (cast ('2001-05-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy deleted file mode 100644 index 0754878e39e7a2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select distinct(i_product_name) - from item i1 - where i_manufact_id between 704 and 704+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'forest' or i_color = 'lime') and - (i_units = 'Pallet' or i_units = 'Pound') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'navy' or i_color = 'slate') and - (i_units = 'Gross' or i_units = 'Bunch') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 'Men' and - (i_color = 'powder' or i_color = 'sky') and - (i_units = 'Dozen' or i_units = 'Lb') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'maroon' or i_color = 'smoke') and - (i_units = 'Ounce' or i_units = 'Case') and - (i_size = 'economy' or i_size = 'small') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'dark' or i_color = 'aquamarine') and - (i_units = 'Ton' or i_units = 'Tbl') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'frosted' or i_color = 'plum') and - (i_units = 'Dram' or i_units = 'Box') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 
'Men' and - (i_color = 'papaya' or i_color = 'peach') and - (i_units = 'Bundle' or i_units = 'Carton') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'firebrick' or i_color = 'sienna') and - (i_units = 'Cup' or i_units = 'Each') and - (i_size = 'economy' or i_size = 'small') - )))) > 0 - order by i_product_name - limit 100""" - qt_ds_shape_41 ''' - explain shape plan - select distinct(i_product_name) - from item i1 - where i_manufact_id between 704 and 704+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'forest' or i_color = 'lime') and - (i_units = 'Pallet' or i_units = 'Pound') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'navy' or i_color = 'slate') and - (i_units = 'Gross' or i_units = 'Bunch') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 'Men' and - (i_color = 'powder' or i_color = 'sky') and - (i_units = 'Dozen' or i_units = 'Lb') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'maroon' or i_color = 'smoke') and - (i_units = 'Ounce' or i_units = 'Case') and - (i_size = 'economy' or i_size = 'small') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'dark' or i_color = 'aquamarine') and - (i_units = 'Ton' or i_units = 'Tbl') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'frosted' or i_color = 'plum') and - (i_units = 'Dram' or i_units = 'Box') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 'Men' and - (i_color = 'papaya' or i_color = 'peach') and - (i_units = 'Bundle' or i_units = 'Carton') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'firebrick' or i_color = 'sienna') and - (i_units = 'Cup' or i_units = 'Each') and - (i_size = 'economy' or i_size = 'small') - )))) 
> 0 - order by i_product_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy deleted file mode 100644 index 44f5c9f0a6fff7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=1998 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 """ - qt_ds_shape_42 ''' - explain shape plan - select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=1998 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy deleted file mode 100644 index abb3e84a2dc74f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy 
+++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then 
ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100""" - qt_ds_shape_43 ''' - explain shape plan - select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy deleted file mode 100644 index 46c52ced591560..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by 
rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 ''' - explain shape plan - select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy deleted file mode 100644 index 9c27eb60e16869..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - 
and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 ''' - explain shape plan - select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy deleted file mode 100644 index 44f48ecde635c3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_city in ('Midway','Fairview','Fairview','Midway','Fairview') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100""" - qt_ds_shape_46 ''' - explain shape plan - select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - 
,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_city in ('Midway','Fairview','Fairview','Midway','Fairview') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy deleted file mode 100644 index ee5856493b5f98..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2000 or - ( d_year = 2000-1 and d_moy =12) or - ( d_year = 2000+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name, v1.s_company_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum 
- from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2000 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_47 ''' - explain shape plan - with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2000 or - ( d_year = 2000-1 and d_moy =12) or - ( d_year = 2000+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name, v1.s_company_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - 
select * - from v2 - where d_year = 2000 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy deleted file mode 100644 index fa89e8b5976f70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'S' - and - cd_education_status = 'Secondary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'M' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = 'Advanced Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('ND', 'NY', 'SD') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'GA', 'KS') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('CO', 'MN', 'NC') - and ss_net_profit between 50 and 25000 - ) - ) -""" - qt_ds_shape_48 ''' - explain shape plan - select sum (ss_quantity) - from store_sales, 
store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'S' - and - cd_education_status = 'Secondary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'M' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = 'Advanced Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('ND', 'NY', 'SD') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'GA', 'KS') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('CO', 'MN', 'NC') - and ss_net_profit between 50 and 25000 - ) - ) - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy deleted file mode 100644 index 5883d3522930e5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy +++ /dev/null @@ -1,295 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 
10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left 
outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100""" - qt_ds_shape_49 ''' - explain shape plan - select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - 
,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy deleted file mode 100644 index f1db7b7207b66d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select 
cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit 
- from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_5 ''' - explain shape plan - with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as 
profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy deleted file mode 100644 index 3a0a41bad7a6d9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case 
when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100""" - qt_ds_shape_50 ''' - explain shape plan - select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and 
ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy deleted file mode 100644 index d7fe227ff00e3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy +++ /dev/null @@ -1,147 +0,0 @@ -import java.util.stream.Collectors - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_common_expr_pushdown=false" - sql "set enable_function_pushdown=true" - sql "set enable_parallel_result_sink=false" - sql "set experimental_parallel_scan_max_scanners_count=16" - sql "set experimental_parallel_scan_min_rows_per_scanner=128" - sql "set fragment_transmission_compression_codec=lz4" - sql "set insert_visible_timeout_ms=60000" - sql "set partitioned_hash_agg_rows_threshold=1048576" - sql "set partitioned_hash_join_rows_threshold=8" - sql "set topn_opt_limit_threshold=1" - sql "set wait_timeout=31000" - - - def variables = sql "show variables" - def variableString = variables.stream() - .map { it.toString() } - .collect(Collectors.joining("\n")) - logger.info("Variables:\n${variableString}") - - def ds = """WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) 
cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100""" - qt_ds_shape_51 ''' - explain shape plan - WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding 
and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy deleted file mode 100644 index 327bf50ae0ed32..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2000 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 """ - qt_ds_shape_52 ''' - explain shape plan - select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2000 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy deleted file mode 100644 index b23a25424e4a6f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1186,1186+1,1186+2,1186+3,1186+4,1186+5,1186+6,1186+7,1186+8,1186+9,1186+10,1186+11) and -((i_category in ('Books','Children','Electronics') and -i_class in 
('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100""" - qt_ds_shape_53 ''' - explain shape plan - select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1186,1186+1,1186+2,1186+3,1186+4,1186+5,1186+6,1186+7,1186+8,1186+9,1186+10,1186+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy deleted file mode 100644 index 1ee7c497f8f1c1..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, 
- item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 ''' - explain shape plan - with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select 
distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy deleted file mode 100644 index c2d4c1db731a68..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=52 - and d_moy=11 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 """ - qt_ds_shape_55 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=52 - and d_moy=11 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy deleted file mode 100644 index f9a99301af1af6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and 
cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 ''' - explain shape plan - with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - 
and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy deleted file mode 100644 index b1c5ec41918363..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_category, v1.i_brand, v1.cc_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, avg_monthly_sales - limit 100""" - qt_ds_shape_57 ''' - explain shape plan - with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_category, v1.i_brand, v1.cc_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, avg_monthly_sales - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy deleted file mode 100644 index 5d618277923ebb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - 
,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100""" - qt_ds_shape_58 ''' - explain shape plan - with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - 
,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy deleted file mode 100644 index 9bda7dd0b108ce..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = 
wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq between 1195 and 1195 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq between 1195+ 12 and 1195 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100""" - qt_ds_shape_59 ''' - explain shape plan - with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq 
between 1195 and 1195 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq between 1195+ 12 and 1195 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy deleted file mode 100644 index 5267c9b608cf70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 ''' - explain shape plan - select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy deleted file mode 100644 index 362298991deb80..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - 
from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100""" - qt_ds_shape_60 ''' - explain shape plan - with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy deleted file mode 100644 index d2e1029fa5c139..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 
'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 ''' - explain shape plan - select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy deleted file mode 100644 index 686760596adddd..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - 
ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1223 and 1223 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100""" - qt_ds_shape_62 ''' - explain shape plan - select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1223 and 1223 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy deleted file mode 100644 index 1b0cd11db4190b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1222,1222+1,1222+2,1222+3,1222+4,1222+5,1222+6,1222+7,1222+8,1222+9,1222+10,1222+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100""" - qt_ds_shape_63 ''' - explain shape plan - select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) 
avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1222,1222+1,1222+2,1222+3,1222+4,1222+5,1222+6,1222+7,1222+8,1222+9,1222+10,1222+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy deleted file mode 100644 index ae63cbc2dab186..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - 
,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 ''' - explain shape plan - with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as 
sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 
48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy deleted file mode 100644 index 25955c1ce93d77..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100""" - qt_ds_shape_65 ''' - 
explain shape plan - select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy deleted file mode 100644 index 19cd030b878127..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy +++ /dev/null @@ -1,477 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) 
as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship * 
ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 and 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* 
cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 AND 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - 
limit 100""" - qt_ds_shape_66 ''' - explain shape plan - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 
3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as dec_net - 
from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 and 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then 
cs_net_paid * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 AND 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy deleted file mode 100644 index 881919b24ab14d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by 
i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 ''' - explain shape plan - select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy deleted file mode 100644 index 30e37515cbe938..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in 
('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 ''' - explain shape plan - select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy deleted file mode 100644 index 09cbd0185b150d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('IL','TX','ME') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - (not exists (select * 
- from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100""" - qt_ds_shape_69 ''' - explain shape plan - select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('IL','TX','ME') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy deleted file mode 100644 index b018fd1ff86677..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - 
cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_7 ''' - explain shape plan - select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy deleted file mode 100644 index 764f9bd2483034..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1220 and 1220+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1220 and 1220+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100""" - qt_ds_shape_70 ''' - explain shape plan - select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by 
sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1220 and 1220+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1220 and 1220+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy deleted file mode 100644 index 409be785e48011..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=2002 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - """ - qt_ds_shape_71 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - 
ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=2002 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy deleted file mode 100644 index 5880246f558a34..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" 
- qt_ds_shape_72 ''' - explain shape plan - select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy deleted file mode 100644 index fbd8df6bec621e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '5001-10000') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in 
('Williamson County','Williamson County','Williamson County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc""" - qt_ds_shape_73 ''' - explain shape plan - select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '5001-10000') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy deleted file mode 100644 index 90580c2e6b756b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - 
and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 1,3,2 -limit 100""" - qt_ds_shape_74 ''' - explain shape plan - with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - 
t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 1,3,2 -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy deleted file mode 100644 index 775798604b2ee8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON 
i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Sports') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=2002 - AND prev_yr.d_year=2002-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100""" - qt_ds_shape_75 ''' - explain shape plan - WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - 
LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Sports') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=2002 - AND prev_yr.d_year=2002-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy deleted file mode 100644 index 4937f060c5e736..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_customer_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_customer_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_promo_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_promo_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_bill_customer_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_bill_customer_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100""" - qt_ds_shape_76 ''' - explain shape plan - select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_customer_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price 
ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_customer_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_promo_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_promo_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_bill_customer_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_bill_customer_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy deleted file mode 100644 index b188b1982d560c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - 
group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_77 ''' - explain shape plan - with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from 
store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - 
profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy deleted file mode 100644 index b96778997d3d48..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, 
ss_item_sk, ss_customer_sk - ) -select -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 ''' - explain shape plan - with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and 
ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, ss_item_sk, ss_customer_sk - ) -select -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy deleted file mode 100644 index dda5a1cfd58448..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 7 or household_demographics.hd_vehicle_count > -1) - and date_dim.d_dow = 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit 
-limit 100""" - qt_ds_shape_79 ''' - explain shape plan - select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 7 or household_demographics.hd_vehicle_count > -1) - and date_dim.d_dow = 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy deleted file mode 100644 index 11f5a76e10d94e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - 
'81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - 
'55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 ''' - explain shape plan - select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - 
'91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - 
'59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy deleted file mode 100644 index f9e62e637081f6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - 
coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_80 ''' - explain shape plan - with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from 
store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , 
returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy deleted file mode 100644 index 0f39073baf4b47..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2001 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'TN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100""" - qt_ds_shape_81 ''' - explain shape plan - with customer_total_return as - (select cr_returning_customer_sk as 
ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2001 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'TN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy deleted file mode 100644 index 6f3260b90f1206..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 58 and 58+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2001-01-13' as date) and (cast('2001-01-13' as date) + interval 60 day) - and i_manufact_id in (259,559,580,485) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_82 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 58 and 58+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2001-01-13' as date) and (cast('2001-01-13' as date) + interval 60 
day) - and i_manufact_id in (259,559,580,485) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy deleted file mode 100644 index da47ab88acbd69..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - 
,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100""" - qt_ds_shape_83 ''' - explain shape plan - with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where 
sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy deleted file mode 100644 index 98b9cea582ad5c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Woodland' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 60306 - and ib_upper_bound <= 60306 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100""" - qt_ds_shape_84 ''' - explain shape plan - select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Woodland' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 60306 - and ib_upper_bound <= 60306 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy deleted file mode 100644 index 8add0348dae1fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 1998 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'D' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Primary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'College' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'U' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - 
ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('NC', 'TX', 'IA') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('WI', 'WV', 'GA') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('OK', 'VA', 'KY') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100""" - qt_ds_shape_85 ''' - explain shape plan - select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 1998 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'D' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Primary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'College' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'U' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('NC', 'TX', 
'IA') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('WI', 'WV', 'GA') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('OK', 'VA', 'KY') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy deleted file mode 100644 index e5247041ec8897..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1186 and 1186+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100""" - qt_ds_shape_86 ''' - explain shape plan - select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1186 and 1186+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent 
- limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy deleted file mode 100644 index b082d2bdbc1299..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) -) cool_cust -""" - qt_ds_shape_87 ''' - explain shape plan - select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where 
catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) -) cool_cust - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy deleted file mode 100644 index 0b9c4c7c7d2de6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - 
from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and 
ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -""" - qt_ds_shape_88 ''' - explain shape plan - select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and 
((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and 
household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and 
store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy deleted file mode 100644 index dd5752b43fa4f0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (2001) and - ((i_category in ('Books','Children','Electronics') and - i_class in ('history','school-uniforms','audio') - ) - or (i_category in ('Men','Sports','Shoes') and - i_class in ('pants','tennis','womens') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100""" - qt_ds_shape_89 ''' - explain shape plan - select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - 
ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (2001) and - ((i_category in ('Books','Children','Electronics') and - i_class in ('history','school-uniforms','audio') - ) - or (i_category in ('Men','Sports','Shoes') and - i_class in ('pants','tennis','womens') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy deleted file mode 100644 index d72b880214bcc9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - def ds = """select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 1071 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 39161 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 29434 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 6568 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from 
store_sales - where ss_quantity between 81 and 100) > 21216 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -""" - qt_ds_shape_9 ''' - explain shape plan - select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 1071 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 39161 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 29434 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 6568 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where ss_quantity between 81 and 100) > 21216 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy deleted file mode 100644 index 9bce112a5c2d07..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 12 and 12+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 14 and 14+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100""" - qt_ds_shape_90 ''' - explain shape plan - select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 
12 and 12+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 14 and 14+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy deleted file mode 100644 index e250435336edfa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 ''' - explain shape plan - select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and 
cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy deleted file mode 100644 index 5ec81ae0610cae..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 714 -and i_item_sk = ws_item_sk -and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100""" - qt_ds_shape_92 ''' - explain shape plan - select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 714 -and i_item_sk = ws_item_sk -and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy deleted file mode 100644 index f0f52194f2d4f6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'reason 58') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100""" - qt_ds_shape_93 ''' - explain shape plan - select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'reason 58') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy deleted file mode 100644 index cee27ca765ab57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2002-5-01' and - (cast('2002-5-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_94 ''' - explain shape plan - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2002-5-01' and - (cast('2002-5-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and 
web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy deleted file mode 100644 index 8a19be8d039e4d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 ''' - explain shape plan - with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as 
"order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy deleted file mode 100644 index 14645aee61163f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 0 - and store.s_store_name = 'ese' -order by count(*) -limit 100""" - qt_ds_shape_96 ''' - explain shape plan - select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 0 - and store.s_store_name = 'ese' -order by count(*) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy deleted file mode 100644 index e30c1ec14a7857..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and ss_sold_date_sk IS NOT NULL -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and cs_sold_date_sk IS NOT NULL -group by cs_bill_customer_sk - ,cs_item_sk) - 
select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100""" - qt_ds_shape_97 ''' - explain shape plan - with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and ss_sold_date_sk IS NOT NULL -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and cs_sold_date_sk IS NOT NULL -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy deleted file mode 100644 index dedcf82dae4d04..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Men', 'Sports', 'Jewelry') - and ss_sold_date_sk = d_date_sk - and d_date between cast('1999-02-05' as date) - and (cast('1999-02-05' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price 
-order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio""" - qt_ds_shape_98 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Men', 'Sports', 'Jewelry') - and ss_sold_date_sk = d_date_sk - and d_date between cast('1999-02-05' as date) - and (cast('1999-02-05' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy deleted file mode 100644 index 6f9e47f2140a90..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100""" - qt_ds_shape_99 ''' - explain shape plan - select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 
0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy deleted file mode 100644 index 365e953b78953f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy +++ /dev/null @@ -1,484 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - sql """ - drop table if exists lineitem; - """ - sql """ - CREATE TABLE lineitem ( - l_shipdate DATEV2 NOT NULL, - l_orderkey bigint NOT NULL, - l_linenumber int not null, - l_partkey int NOT NULL, - l_suppkey int not null, - l_quantity decimal(15, 2) NOT NULL, - l_extendedprice decimal(15, 2) NOT NULL, - l_discount decimal(15, 2) NOT NULL, - l_tax decimal(15, 2) NOT NULL, - l_returnflag VARCHAR(1) NOT NULL, - l_linestatus VARCHAR(1) NOT NULL, - l_commitdate DATEV2 NOT NULL, - l_receiptdate DATEV2 NOT NULL, - l_shipinstruct VARCHAR(25) NOT NULL, - l_shipmode VARCHAR(10) NOT NULL, - l_comment VARCHAR(44) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`l_shipdate`, `l_orderkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); - """ - - sql """ - drop table if exists orders; - """ - - sql ''' - CREATE TABLE orders ( - o_orderkey bigint NOT NULL, - o_orderdate DATEV2 NOT NULL, - o_custkey int NOT NULL, - o_orderstatus VARCHAR(1) NOT NULL, - o_totalprice decimal(15, 2) NOT NULL, - o_orderpriority VARCHAR(15) NOT NULL, - o_clerk VARCHAR(15) NOT NULL, - o_shippriority int NOT NULL, - o_comment VARCHAR(79) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`o_orderkey`, `o_orderdate`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`o_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); ''' - - sql ''' - drop table if exists partsupp; - ''' - - sql ''' - CREATE TABLE partsupp ( - ps_partkey int NOT NULL, - ps_suppkey int NOT NULL, - ps_availqty int NOT NULL, - ps_supplycost decimal(15, 2) NOT NULL, - ps_comment 
VARCHAR(199) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`ps_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`ps_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists part; - ''' - - sql ''' - CREATE TABLE part ( - p_partkey int NOT NULL, - p_name VARCHAR(55) NOT NULL, - p_mfgr VARCHAR(25) NOT NULL, - p_brand VARCHAR(10) NOT NULL, - p_type VARCHAR(25) NOT NULL, - p_size int NOT NULL, - p_container VARCHAR(10) NOT NULL, - p_retailprice decimal(15, 2) NOT NULL, - p_comment VARCHAR(23) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`p_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists customer; - ''' - - sql ''' - CREATE TABLE customer ( - c_custkey int NOT NULL, - c_name VARCHAR(25) NOT NULL, - c_address VARCHAR(40) NOT NULL, - c_nationkey int NOT NULL, - c_phone VARCHAR(15) NOT NULL, - c_acctbal decimal(15, 2) NOT NULL, - c_mktsegment VARCHAR(10) NOT NULL, - c_comment VARCHAR(117) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`c_custkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists supplier - ''' - - sql ''' - CREATE TABLE supplier ( - s_suppkey int NOT NULL, - s_name VARCHAR(25) NOT NULL, - s_address VARCHAR(40) NOT NULL, - s_nationkey int NOT NULL, - s_phone VARCHAR(15) NOT NULL, - s_acctbal decimal(15, 2) NOT NULL, - s_comment VARCHAR(101) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`s_suppkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists nation; - ''' - - sql ''' - CREATE TABLE `nation` ( - `n_nationkey` int(11) NOT NULL, - `n_name` varchar(25) NOT NULL, - `n_regionkey` int(11) NOT NULL, - `n_comment` varchar(152) NULL - ) ENGINE=OLAP - 
DUPLICATE KEY(`N_NATIONKEY`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`N_NATIONKEY`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists region; - ''' - - sql ''' - CREATE TABLE region ( - r_regionkey int NOT NULL, - r_name VARCHAR(25) NOT NULL, - r_comment VARCHAR(152) - )ENGINE=OLAP - DUPLICATE KEY(`r_regionkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`r_regionkey`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop view if exists revenue0; - ''' - - sql ''' - create view revenue0 (supplier_no, total_revenue) as - select - l_suppkey, - sum(l_extendedprice * (1 - l_discount)) - from - lineitem - where - l_shipdate >= date '1996-01-01' - and l_shipdate < date '1996-01-01' + interval '3' month - group by - l_suppkey; - ''' - - -sql ''' -alter table lineitem modify column l_shipdate set stats ('ndv'='2539', 'num_nulls'='0', 'min_value'='1992-01-02', 'max_value'='1998-12-01', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_orderkey set stats ('ndv'='1491920000', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000000', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_linenumber set stats ('ndv'='7', 'num_nulls'='0', 'min_value'='1', 'max_value'='7', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_partkey set stats ('ndv'='200778064', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000000', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_suppkey set stats ('ndv'='10031328', 'num_nulls'='0', 'min_value'='1', 'max_value'='10000000', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_quantity set stats ('ndv'='50', 'num_nulls'='0', 'min_value'='1.00', 'max_value'='50.00', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_extendedprice set stats ('ndv'='3793003', 'num_nulls'='0', 'min_value'='900.00', 
'max_value'='104950.00', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_discount set stats ('ndv'='11', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.10', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_tax set stats ('ndv'='9', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.08', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_returnflag set stats ('ndv'='3', 'num_nulls'='0', 'min_value'='A', 'max_value'='R', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_linestatus set stats ('ndv'='2', 'num_nulls'='0', 'min_value'='F', 'max_value'='O', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_commitdate set stats ('ndv'='2473', 'num_nulls'='0', 'min_value'='1992-01-31', 'max_value'='1998-10-31', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_receiptdate set stats ('ndv'='2568', 'num_nulls'='0', 'min_value'='1992-01-03', 'max_value'='1998-12-31', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_shipinstruct set stats ('ndv'='4', 'num_nulls'='0', 'min_value'='COLLECT COD', 'max_value'='TAKE BACK RETURN', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_shipmode set stats ('ndv'='7', 'num_nulls'='0', 'min_value'='AIR', 'max_value'='TRUCK', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_comment set stats ('ndv'='155259104', 'num_nulls'='0', 'min_value'=' Tiresias ', 'max_value'='zzle? 
unusual', 'row_count'='5999989709'); -''' - - -sql ''' -alter table orders modify column o_orderkey set stats ('ndv'='1491920000', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000000', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_orderdate set stats ('ndv'='2417', 'num_nulls'='0', 'min_value'='1992-01-01', 'max_value'='1998-08-02', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_custkey set stats ('ndv'='101410744', 'num_nulls'='0', 'min_value'='1', 'max_value'='149999999', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_orderstatus set stats ('ndv'='3', 'num_nulls'='0', 'min_value'='F', 'max_value'='P', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_totalprice set stats ('ndv'='41700404', 'num_nulls'='0', 'min_value'='810.87', 'max_value'='602901.81', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_orderpriority set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='1-URGENT', 'max_value'='5-LOW', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_clerk set stats ('ndv'='1013689', 'num_nulls'='0', 'min_value'='Clerk#000000001', 'max_value'='Clerk#001000000', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_shippriority set stats ('ndv'='1', 'num_nulls'='0', 'min_value'='0', 'max_value'='0', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_comment set stats ('ndv'='272632352', 'num_nulls'='0', 'min_value'=' Tiresias about the', 'max_value'='zzle? 
unusual requests w', 'row_count'='1500000000'); -''' - - -sql ''' -alter table partsupp modify column ps_partkey set stats ('ndv'='200778064', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000000', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_suppkey set stats ('ndv'='10031328', 'num_nulls'='0', 'min_value'='1', 'max_value'='10000000', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_availqty set stats ('ndv'='10008', 'num_nulls'='0', 'min_value'='1', 'max_value'='9999', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_supplycost set stats ('ndv'='100279', 'num_nulls'='0', 'min_value'='1.00', 'max_value'='1000.00', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_comment set stats ('ndv'='303150816', 'num_nulls'='0', 'min_value'=' Tiresias about the accounts detect quickly final foxes. instructions about the blithely unusual theodolites use blithely f', 'max_value'='zzle? unusual requests wake slyly. 
slyly regular requests are e', 'row_count'='800000000'); -''' - - - -sql ''' -alter table part modify column p_partkey set stats ('ndv'='200778064', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000000', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_name set stats ('ndv'='196191408', 'num_nulls'='0', 'min_value'='almond antique aquamarine azure blush', 'max_value'='yellow white wheat violet red', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_mfgr set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='Manufacturer#1', 'max_value'='Manufacturer#5', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_brand set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='Brand#11', 'max_value'='Brand#55', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_type set stats ('ndv'='150', 'num_nulls'='0', 'min_value'='ECONOMY ANODIZED BRASS', 'max_value'='STANDARD POLISHED TIN', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_size set stats ('ndv'='50', 'num_nulls'='0', 'min_value'='1', 'max_value'='50', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_container set stats ('ndv'='40', 'num_nulls'='0', 'min_value'='JUMBO BAG', 'max_value'='WRAP PKG', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_retailprice set stats ('ndv'='120904', 'num_nulls'='0', 'min_value'='900.00', 'max_value'='2099.00', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_comment set stats ('ndv'='14213541', 'num_nulls'='0', 'min_value'=' Tire', 'max_value'='zzle? 
speci', 'row_count'='200000000'); -''' - - - -sql ''' -alter table supplier modify column s_suppkey set stats ('ndv'='10031328', 'num_nulls'='0', 'min_value'='1', 'max_value'='10000000', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_name set stats ('ndv'='9992858', 'num_nulls'='0', 'min_value'='Supplier#000000001', 'max_value'='Supplier#010000000', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_address set stats ('ndv'='10000390', 'num_nulls'='0', 'min_value'=' 04SJW3NWgeWBx2YualVtK62DXnr', 'max_value'='zzzzr MaemffsKy', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_nationkey set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='0', 'max_value'='24', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_phone set stats ('ndv'='9975965', 'num_nulls'='0', 'min_value'='10-100-101-9215', 'max_value'='34-999-999-3239', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_acctbal set stats ('ndv'='1109296', 'num_nulls'='0', 'min_value'='-999.99', 'max_value'='9999.99', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_comment set stats ('ndv'='9854117', 'num_nulls'='0', 'min_value'=' Customer accounts are blithely furiousRecommends', 'max_value'='zzle? 
special packages haggle carefully regular inst', 'row_count'='10000000'); -''' - - - -sql ''' -alter table customer modify column c_custkey set stats ('ndv'='151682592', 'num_nulls'='0', 'min_value'='1', 'max_value'='150000000', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_name set stats ('ndv'='149989056', 'num_nulls'='0', 'min_value'='Customer#000000001', 'max_value'='Customer#150000000', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_address set stats ('ndv'='149316720', 'num_nulls'='0', 'min_value'=' 2WGW,hiM7jHg2', 'max_value'='zzzzyW,aeC8HnFV', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_nationkey set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='0', 'max_value'='24', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_phone set stats ('ndv'='150226160', 'num_nulls'='0', 'min_value'='10-100-100-3024', 'max_value'='34-999-999-9215', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_acctbal set stats ('ndv'='1109296', 'num_nulls'='0', 'min_value'='-999.99', 'max_value'='9999.99', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_mktsegment set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='AUTOMOBILE', 'max_value'='MACHINERY', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_comment set stats ('ndv'='120255488', 'num_nulls'='0', 'min_value'=' Tiresias about the accounts haggle quiet, busy foxe', 'max_value'='zzle? 
special accounts about the iro', 'row_count'='150000000'); -''' - - - -sql ''' -alter table region modify column r_regionkey set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='0', 'max_value'='4', 'row_count'='5'); -''' - -sql ''' -alter table region modify column r_name set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='AFRICA', 'max_value'='MIDDLE EAST', 'row_count'='5'); -''' - -sql ''' -alter table region modify column r_comment set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='ges. thinly even pinto beans ca', 'max_value'='uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl', 'row_count'='5'); -''' - - - -sql ''' -alter table nation modify column n_nationkey set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='0', 'max_value'='24', 'row_count'='25'); -''' - -sql ''' -alter table nation modify column n_name set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='ALGERIA', 'max_value'='VIETNAM', 'row_count'='25'); -''' - -sql ''' -alter table nation modify column n_regionkey set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='0', 'max_value'='4', 'row_count'='25'); -''' - -sql ''' -alter table nation modify column n_comment set stats ('ndv'='25', 'num_nulls'='0', 'min_value'=' haggle. carefully final deposits detect slyly agai', 'max_value'='y final packages. slow foxes cajole quickly. quickly silent platelets breach ironic accounts. unusual pinto be', 'row_count'='25'); -''' - -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy deleted file mode 100644 index ae7072c55ed913..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy deleted file mode 100644 index 2ea2891b4a2c6e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy deleted file mode 100644 index 997aa07898b61e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy deleted file 
mode 100644 index b78ad09e0e6bee..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, 
- lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy deleted file mode 100644 index 52ce1a04b62a5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy deleted file mode 100644 index 28d7599970a0e0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy deleted file mode 100644 index 00897a7819000d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy deleted file mode 100644 index 8d682c8edb9004..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - select - s_suppkey - from - supplier - where 
- s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy deleted file mode 100644 index 52937ee96b7b84..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy deleted file mode 100644 index 2e4245d06ee338..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy deleted file mode 100644 index c2fffd575f7e3a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED 
BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy deleted file mode 100644 index cabce2fc86697b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy deleted file mode 100644 index 32efcdde07b9a0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = t1.ps_suppkey - and t1.ps_availqty > t2.l_q -) 
t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy deleted file mode 100644 index ea9819fd3cfed5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy deleted file mode 100644 index 28b5d0f7648fef..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order by - numwait desc, - s_name - limit 100; - """ 
-} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy deleted file mode 100644 index d363362a329540..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy deleted file mode 100644 index cc0f648b264a5f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy deleted file mode 100644 index 
3454d5bb6fef9d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - 
o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy deleted file mode 100644 index f49ff1f561aef6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy deleted file mode 100644 index 43da4d59e6ba78..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy deleted file mode 100644 index bac6387a3cb048..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as 
shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy deleted file mode 100644 index 44199ff620ebdf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy deleted file mode 100644 index 263f4f6f16d914..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - nation, - o_year - order by - 
nation, - o_year desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy deleted file mode 100644 index f90bc9891cb428..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy deleted file mode 100644 index 7079db86ee7c77..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy deleted file mode 100644 index 6c3ad7913a6958..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 
0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy deleted file mode 100644 index 75846854982e85..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy deleted file mode 100644 index bb49201e823132..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy deleted file mode 100644 index 04f031f141ff23..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy +++ 
/dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy deleted file mode 100644 index 4c4288133d3e70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy deleted file mode 100644 index 4927b90c071a7f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - select - s_suppkey - from - supplier - where - s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy deleted file mode 100644 index 748d16fc9d537d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy +++ /dev/null @@ 
-1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy 
b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy deleted file mode 100644 index 9ad92d40f2e8ed..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy deleted file mode 100644 index 86b4ea66b18a6c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - 
and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy deleted file mode 100644 index bde54070a72c3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy deleted file mode 100644 index 0f618516b8be62..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = t1.ps_suppkey 
- and t1.ps_availqty > t2.l_q -) t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy deleted file mode 100644 index 50315e7a95aafd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy deleted file mode 100644 index ee7e8f9be4cd98..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order by - numwait 
desc, - s_name - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy deleted file mode 100644 index c14fdbe4a4c172..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy deleted file mode 100644 index 836a30172eb0ec..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy deleted file mode 100644 index 
de22ca1cafda73..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < 
l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy deleted file mode 100644 index ba5abe1ed4719e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy deleted file mode 100644 index 5a03ad454af74d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy deleted file mode 100644 index 08c7532ba994bc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date 
'1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy deleted file mode 100644 index 56455092954780..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy deleted file mode 100644 index 8b5b664928891c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - 
nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy deleted file mode 100644 index 3d9b2012693ce3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("test_pushdown_setop") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_rf_setop """ - explain shape plan - select count() from ((select l_linenumber from lineitem) except (select o_orderkey from orders)) T join region on T.l_linenumber = r_regionkey; - """ - - qt_rf_setop_expr """ - explain shape plan select count() from ((select l_linenumber from lineitem) except (select o_orderkey from orders)) T join region on abs(T.l_linenumber) = r_regionkey; - """ -} - diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy deleted file mode 100644 index 76bffda65f8c4a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy deleted file mode 100644 index 89ca6dc051fd3f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy deleted file mode 100644 index b21ba41bf4187b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy deleted file mode 100644 index ced1db1a561937..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy deleted file mode 100644 index b7eaca41ac5d36..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - 
* Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy deleted file mode 100644 index 
5617134c86680c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy deleted file mode 100644 index 02ad529f9b314d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy deleted file mode 100644 index 333b6d837cac19..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - select - s_suppkey - from - supplier - where - s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy deleted file mode 100644 index 75ee0508980951..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy deleted file mode 100644 index 425d7da6119e43..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy deleted file mode 100644 index f5a3aa43a8dd29..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - 
and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy deleted file mode 100644 index ae39c1116b805d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy deleted file mode 100644 index 97e6a2272c60e4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = t1.ps_suppkey - and t1.ps_availqty > t2.l_q -) t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy deleted file mode 100644 index f24004cec88626..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy deleted file mode 100644 index 6bc96c09407a3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order by - numwait desc, - s_name - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy deleted file mode 100644 index e4c5d7ceaa2411..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', 
'17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy deleted file mode 100644 index 3f4693ae74591c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy deleted file mode 100644 index 6a68d84009ddf7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy deleted file mode 100644 index b334dbbbf3bec7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy deleted file mode 100644 index 5f0e58c5d0d421..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy deleted file 
mode 100644 index 36eacb5f6fd120..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and 
o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy deleted file mode 100644 index de9e89b2261f42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy deleted file mode 100644 index 7ee7e2ec186f7b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy 
b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy deleted file mode 100644 index dfb5a6a0d2e83b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy deleted file mode 100644 index 30cc6abdbdef4c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy deleted file mode 100644 index fd49b25488a426..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy deleted file mode 100644 index 3b2b4fe1f536a3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy deleted file mode 100644 index 72252a9779675b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy deleted file mode 100644 index f9a0eeefd33fa0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy deleted file mode 100644 index b79e8d3855e80e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy deleted file mode 100644 index 2ebca626bbdf19..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - 
select - s_suppkey - from - supplier - where - s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy deleted file mode 100644 index 06aa3299d5022f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy deleted file mode 100644 index 4f5a07cfbc1371..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy deleted file mode 100644 index c800d256d5c66f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy +++ /dev/null 
@@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - 
or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy deleted file mode 100644 index ee920ca6e50d78..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy deleted file mode 100644 index c30a636d3ff73a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = 
t1.ps_suppkey - and t1.ps_availqty > t2.l_q -) t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy deleted file mode 100644 index 4007273901e112..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy deleted file mode 100644 index 89216bd88a2b39..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order 
by - numwait desc, - s_name - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy deleted file mode 100644 index e2f7e1096b1786..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy deleted file mode 100644 index 01c926f1efc768..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy 
deleted file mode 100644 index 55ff31bb4577df..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where 
- l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy deleted file mode 100644 index bdc6d994c27245..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy deleted file mode 100644 index f1474a164444af..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy deleted file mode 100644 index 2adc348b633426..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and 
s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy deleted file mode 100644 index f99324ba64cb5b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy deleted file mode 100644 index 692afad084f535..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit 
- group by - nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git a/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy b/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy index 812bfffeb2f3e0..cee47c818130a7 100644 --- a/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy +++ b/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy @@ -78,4 +78,13 @@ suite("test_is_ip_address_in_range_function") { qt_sql "SELECT is_ip_address_in_range(NULL, '::ffff:192.168.0.4/128')" qt_sql "SELECT is_ip_address_in_range(NULL, NULL)" + + + sql """ DROP TABLE IF EXISTS ip_test """ + sql """ CREATE TABLE IF NOT EXISTS ip_test(id INT, data string) DISTRIBUTED BY HASH(id) BUCKETS 1 PROPERTIES ('replication_num' = '1');""" + sql """ INSERT INTO ip_test values (54, '2001:db8:4::/128'); """ + sql """ INSERT INTO ip_test values (55, NULL); """ + qt_sql1 """ select * from ip_test order by 1; """ + qt_sql2 "SELECT data, IS_IP_ADDRESS_IN_RANGE(CAST('0.0.0.1' AS STRING), data) FROM ip_test order by 1;" + } \ No newline at end of file diff --git a/regression-test/suites/nereids_clickbench_shape_p0/load.groovy b/regression-test/suites/shape_check/clickbench/load.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/load.groovy rename to regression-test/suites/shape_check/clickbench/load.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query1.groovy b/regression-test/suites/shape_check/clickbench/query1.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query1.groovy rename to regression-test/suites/shape_check/clickbench/query1.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query10.groovy b/regression-test/suites/shape_check/clickbench/query10.groovy 
similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query10.groovy rename to regression-test/suites/shape_check/clickbench/query10.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query11.groovy b/regression-test/suites/shape_check/clickbench/query11.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query11.groovy rename to regression-test/suites/shape_check/clickbench/query11.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query12.groovy b/regression-test/suites/shape_check/clickbench/query12.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query12.groovy rename to regression-test/suites/shape_check/clickbench/query12.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query13.groovy b/regression-test/suites/shape_check/clickbench/query13.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query13.groovy rename to regression-test/suites/shape_check/clickbench/query13.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query14.groovy b/regression-test/suites/shape_check/clickbench/query14.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query14.groovy rename to regression-test/suites/shape_check/clickbench/query14.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query15.groovy b/regression-test/suites/shape_check/clickbench/query15.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query15.groovy rename to regression-test/suites/shape_check/clickbench/query15.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query16.groovy b/regression-test/suites/shape_check/clickbench/query16.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query16.groovy rename to 
regression-test/suites/shape_check/clickbench/query16.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query17.groovy b/regression-test/suites/shape_check/clickbench/query17.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query17.groovy rename to regression-test/suites/shape_check/clickbench/query17.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query18.groovy b/regression-test/suites/shape_check/clickbench/query18.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query18.groovy rename to regression-test/suites/shape_check/clickbench/query18.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query19.groovy b/regression-test/suites/shape_check/clickbench/query19.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query19.groovy rename to regression-test/suites/shape_check/clickbench/query19.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query2.groovy b/regression-test/suites/shape_check/clickbench/query2.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query2.groovy rename to regression-test/suites/shape_check/clickbench/query2.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query20.groovy b/regression-test/suites/shape_check/clickbench/query20.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query20.groovy rename to regression-test/suites/shape_check/clickbench/query20.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query21.groovy b/regression-test/suites/shape_check/clickbench/query21.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query21.groovy rename to regression-test/suites/shape_check/clickbench/query21.groovy diff --git 
a/regression-test/suites/nereids_clickbench_shape_p0/query22.groovy b/regression-test/suites/shape_check/clickbench/query22.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query22.groovy rename to regression-test/suites/shape_check/clickbench/query22.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query23.groovy b/regression-test/suites/shape_check/clickbench/query23.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query23.groovy rename to regression-test/suites/shape_check/clickbench/query23.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query24.groovy b/regression-test/suites/shape_check/clickbench/query24.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query24.groovy rename to regression-test/suites/shape_check/clickbench/query24.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query25.groovy b/regression-test/suites/shape_check/clickbench/query25.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query25.groovy rename to regression-test/suites/shape_check/clickbench/query25.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query26.groovy b/regression-test/suites/shape_check/clickbench/query26.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query26.groovy rename to regression-test/suites/shape_check/clickbench/query26.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query27.groovy b/regression-test/suites/shape_check/clickbench/query27.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query27.groovy rename to regression-test/suites/shape_check/clickbench/query27.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query28.groovy 
b/regression-test/suites/shape_check/clickbench/query28.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query28.groovy rename to regression-test/suites/shape_check/clickbench/query28.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query29.groovy b/regression-test/suites/shape_check/clickbench/query29.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query29.groovy rename to regression-test/suites/shape_check/clickbench/query29.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query3.groovy b/regression-test/suites/shape_check/clickbench/query3.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query3.groovy rename to regression-test/suites/shape_check/clickbench/query3.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query30.groovy b/regression-test/suites/shape_check/clickbench/query30.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query30.groovy rename to regression-test/suites/shape_check/clickbench/query30.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query31.groovy b/regression-test/suites/shape_check/clickbench/query31.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query31.groovy rename to regression-test/suites/shape_check/clickbench/query31.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query32.groovy b/regression-test/suites/shape_check/clickbench/query32.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query32.groovy rename to regression-test/suites/shape_check/clickbench/query32.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query33.groovy b/regression-test/suites/shape_check/clickbench/query33.groovy similarity index 100% rename from 
regression-test/suites/nereids_clickbench_shape_p0/query33.groovy rename to regression-test/suites/shape_check/clickbench/query33.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query34.groovy b/regression-test/suites/shape_check/clickbench/query34.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query34.groovy rename to regression-test/suites/shape_check/clickbench/query34.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query35.groovy b/regression-test/suites/shape_check/clickbench/query35.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query35.groovy rename to regression-test/suites/shape_check/clickbench/query35.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query36.groovy b/regression-test/suites/shape_check/clickbench/query36.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query36.groovy rename to regression-test/suites/shape_check/clickbench/query36.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query37.groovy b/regression-test/suites/shape_check/clickbench/query37.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query37.groovy rename to regression-test/suites/shape_check/clickbench/query37.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query38.groovy b/regression-test/suites/shape_check/clickbench/query38.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query38.groovy rename to regression-test/suites/shape_check/clickbench/query38.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query39.groovy b/regression-test/suites/shape_check/clickbench/query39.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query39.groovy rename to 
regression-test/suites/shape_check/clickbench/query39.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query4.groovy b/regression-test/suites/shape_check/clickbench/query4.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query4.groovy rename to regression-test/suites/shape_check/clickbench/query4.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query40.groovy b/regression-test/suites/shape_check/clickbench/query40.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query40.groovy rename to regression-test/suites/shape_check/clickbench/query40.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query41.groovy b/regression-test/suites/shape_check/clickbench/query41.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query41.groovy rename to regression-test/suites/shape_check/clickbench/query41.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query42.groovy b/regression-test/suites/shape_check/clickbench/query42.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query42.groovy rename to regression-test/suites/shape_check/clickbench/query42.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query43.groovy b/regression-test/suites/shape_check/clickbench/query43.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query43.groovy rename to regression-test/suites/shape_check/clickbench/query43.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query5.groovy b/regression-test/suites/shape_check/clickbench/query5.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query5.groovy rename to regression-test/suites/shape_check/clickbench/query5.groovy diff --git 
a/regression-test/suites/nereids_clickbench_shape_p0/query6.groovy b/regression-test/suites/shape_check/clickbench/query6.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query6.groovy rename to regression-test/suites/shape_check/clickbench/query6.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query7.groovy b/regression-test/suites/shape_check/clickbench/query7.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query7.groovy rename to regression-test/suites/shape_check/clickbench/query7.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query8.groovy b/regression-test/suites/shape_check/clickbench/query8.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query8.groovy rename to regression-test/suites/shape_check/clickbench/query8.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query9.groovy b/regression-test/suites/shape_check/clickbench/query9.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query9.groovy rename to regression-test/suites/shape_check/clickbench/query9.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/load.groovy b/regression-test/suites/shape_check/ssb_sf100/load.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/load.groovy rename to regression-test/suites/shape_check/ssb_sf100/load.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/flat.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/flat.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/flat.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/flat.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q1.1.groovy similarity 
index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q1.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q1.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q1.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q1.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q1.3.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q2.1.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q2.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q2.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q2.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q2.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q2.3.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.1.groovy similarity index 100% rename from 
regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.3.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.4.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.4.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.4.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.4.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q4.1.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q4.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q4.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q4.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q4.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.3.groovy 
rename to regression-test/suites/shape_check/ssb_sf100/shape/q4.3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/load.groovy b/regression-test/suites/shape_check/tpcds_sf100/constraints/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/load.groovy rename to regression-test/suites/shape_check/tpcds_sf100/constraints/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/constraints/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/constraints/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_rf_prune.py b/regression-test/suites/shape_check/tpcds_sf100/ddl/gen_rf_prune.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_rf_prune.py rename to regression-test/suites/shape_check/tpcds_sf100/ddl/gen_rf_prune.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_shape.py b/regression-test/suites/shape_check/tpcds_sf100/ddl/gen_shape.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_shape.py rename to regression-test/suites/shape_check/tpcds_sf100/ddl/gen_shape.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl b/regression-test/suites/shape_check/tpcds_sf100/ddl/rf_prune.tmpl similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl rename to regression-test/suites/shape_check/tpcds_sf100/ddl/rf_prune.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl b/regression-test/suites/shape_check/tpcds_sf100/ddl/shape.tmpl similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl rename to regression-test/suites/shape_check/tpcds_sf100/ddl/shape.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/load.groovy b/regression-test/suites/shape_check/tpcds_sf100/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/load.groovy rename to regression-test/suites/shape_check/tpcds_sf100/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query12.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query22.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query31.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query41.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query50.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query60.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query7.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query8.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query89.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query99.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query18.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query28.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query37.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query47.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query56.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query66.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query75.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query85.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query94.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/rf_prune/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query2.groovy 
similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query24.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query3.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query34.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query4.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query44.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query5.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query54.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query6.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query64.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query7.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query74.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query8.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query84.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query9.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query94.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query1.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query2.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query30.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query40.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query51.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query61.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query72.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query82.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query93.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/tpcds_sf100_stats.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/tpcds_sf100_stats.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/tpcds_sf100_stats.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/tpcds_sf100_stats.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/gen_shape.py b/regression-test/suites/shape_check/tpcds_sf1000/ddl/gen_shape.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/gen_shape.py rename to regression-test/suites/shape_check/tpcds_sf1000/ddl/gen_shape.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl b/regression-test/suites/shape_check/tpcds_sf1000/ddl/shape.tmpl similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl rename to regression-test/suites/shape_check/tpcds_sf1000/ddl/shape.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.groovy b/regression-test/suites/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query1.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query1.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query10.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query10.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query11.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query11.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query12.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query12.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query13.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query13.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query13.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query14.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query14.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query15.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query15.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query16.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query16.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query17.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query17.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query18.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query18.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query19.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query19.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query19.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query2.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query2.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query20.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query20.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query21.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query21.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query22.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query22.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query23.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query23.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query24.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query24.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query24.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query25.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query25.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query26.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query26.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query27.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query27.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query28.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query28.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query29.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query29.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query3.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query3.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query3.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query30.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query30.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query31.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query31.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query32.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query32.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query34.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query34.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query36.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query36.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query37.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query37.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query37.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query38.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query38.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query39.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query39.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query4.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query4.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query40.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query40.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query41.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query41.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query42.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query42.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query42.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query43.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query43.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query44.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query44.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query45.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query45.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query46.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query46.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query47.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query47.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query48.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query48.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query48.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query49.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query49.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query5.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query5.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query50.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query50.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query51.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query51.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query52.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query52.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query53.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query53.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query53.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query54.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query54.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query55.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query55.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query56.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query56.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query57.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query57.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query58.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query58.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query59.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query59.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query59.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query6.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query6.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query60.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query60.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query61.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query61.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query62.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query62.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query63.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query63.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query64.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query64.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query64.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query65.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query65.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query66.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query66.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query67.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query67.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query68.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query68.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query69.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query69.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query7.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query7.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query7.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query70.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query70.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query71.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query71.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query72.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query72.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query73.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query73.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query74.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query74.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query75.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query75.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query75.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query76.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query76.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query77.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query77.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query78.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query78.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query79.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query79.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query8.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query8.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query80.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query80.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query80.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query81.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query81.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query82.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query82.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query84.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query84.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query85.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query85.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query86.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query86.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query87.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query87.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query87.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query88.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query88.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query89.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query89.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query9.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query9.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query90.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query90.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query91.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query91.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query92.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query92.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query92.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query93.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query93.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query94.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query94.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query95.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query95.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query96.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query96.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query97.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query97.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query98.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/hint/query98.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query98.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query99.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/load.groovy b/regression-test/suites/shape_check/tpcds_sf1000/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/load.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query11.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query22.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query32.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query43.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query53.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query64.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query74.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query85.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query95.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/gen_shape.py b/regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/gen_shape.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/gen_shape.py rename to regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/gen_shape.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/shape.tmpl b/regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/shape.tmpl similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/shape.tmpl rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/shape.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/load.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/load.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query13.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query19.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query23.groovy rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query29.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query33.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query39.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query44.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query49.groovy rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query54.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query59.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query64.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query7.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query74.groovy rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query8.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query84.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query9.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query95.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query99.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q10.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q10.groovy diff --git 
a/regression-test/suites/nereids_hint_tpch_p0/shape/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q11.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q11.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q12.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q12.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q13.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q13.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q14.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q14.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q15.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q15.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q17.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q17.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q19.groovy similarity index 100% rename 
from regression-test/suites/nereids_hint_tpch_p0/shape/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q19.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q3.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q3.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q4.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q4.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q5.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q5.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q7.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q7.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q8.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q8.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q9.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q9.groovy diff --git 
a/regression-test/suites/nereids_tpch_shape_sf1000_p0/load.groovy b/regression-test/suites/shape_check/tpch_sf1000/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/load.groovy rename to regression-test/suites/shape_check/tpch_sf1000/load.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q13.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q21.groovy diff --git 
a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q4.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q7.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q11.groovy diff --git 
a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/rf_prune/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q21.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q4.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q7.groovy similarity index 100% rename 
from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.groovy b/regression-test/suites/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.groovy rename to regression-test/suites/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q4.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q7.groovy 
similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy rename 
to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q4.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q7.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q9.groovy diff --git a/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql b/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql index 7e950580553f68..23b16480be3eb1 100644 --- a/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql +++ b/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql @@ -213,7 +213,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 3 +DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 4 PROPERTIES ( "replication_num" = "1" ); @@ -339,7 +339,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 1 +DISTRIBUTED BY HASH(inv_item_sk, 
inv_warehouse_sk) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -449,7 +449,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 1 +DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -648,7 +648,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 1 +DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -826,7 +826,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 3 +DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -1006,7 +1006,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 1 +DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -1112,7 +1112,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 3 +DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 4 PROPERTIES ( "replication_num" = "1" ); diff --git a/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql b/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql index 87201403d39fb1..e6d62165f97004 100644 --- a/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql +++ b/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql @@ -213,7 +213,7 @@ PARTITION `p70` VALUES 
LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "catalog" @@ -340,7 +340,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 256 +DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 216 PROPERTIES ( "replication_num" = "1" ); @@ -450,7 +450,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "catalog" @@ -485,7 +485,7 @@ CREATE TABLE IF NOT EXISTS customer_address ( ca_location_type char(20) ) DUPLICATE KEY(ca_address_sk) -DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 256 +DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 216 PROPERTIES ( "replication_num" = "1" ); @@ -650,7 +650,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "web" @@ -829,7 +829,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "web" @@ -1010,7 +1010,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), 
PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 256 +DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "store" @@ -1117,7 +1117,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 256 +DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "store" @@ -1158,7 +1158,7 @@ CREATE TABLE IF NOT EXISTS customer ( c_last_review_date_sk integer ) DUPLICATE KEY(c_customer_sk) -DISTRIBUTED BY HASH(c_customer_id) BUCKETS 256 +DISTRIBUTED BY HASH(c_customer_id) BUCKETS 216 PROPERTIES ( "replication_num" = "1" );