[ci](cloud) add cloud p1 pipeline (apache#31239)
* [ci](cloud) add cloud p1 pipeline
Co-authored-by: stephen <[email protected]>
hello-stephen authored Feb 22, 2024
1 parent abb9874 commit 001af83
Showing 8 changed files with 136 additions and 12 deletions.
58 changes: 51 additions & 7 deletions .github/workflows/comment-to-trigger-teamcity.yml
@@ -47,6 +47,7 @@ jobs:
"${COMMENT_BODY}" == *'run external'* ||
"${COMMENT_BODY}" == *'run pipelinex_p0'* ||
"${COMMENT_BODY}" == *'run cloud_p0'* ||
"${COMMENT_BODY}" == *'run cloud_p1'* ||
"${COMMENT_BODY}" == *'run arm'* ||
"${COMMENT_BODY}" == *'run performance'* ]]; then
echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
@@ -64,7 +65,7 @@ jobs:
echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|cloud_p0|arm|performance)( [1-9]*[0-9]+)*"
reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|pipelinex_p0|cloud_p0|cloud_p1|arm|performance)( [1-9]*[0-9]+)*"
COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a "$GITHUB_OUTPUT"
@@ -122,6 +123,11 @@ jobs:
else
echo "changed_cloud_p0=false" | tee -a "$GITHUB_OUTPUT"
fi
if file_changed_cloud_p1; then
echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
else
echo "changed_cloud_p1=false" | tee -a "$GITHUB_OUTPUT"
fi
else
echo "INFO: failed to _get_pr_changed_files, default trigger all"
echo "changed_fe_ut=true" | tee -a "$GITHUB_OUTPUT"
@@ -134,6 +140,7 @@
echo "changed_p1=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_performance=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
fi
# - name: "Setup tmate session"
@@ -258,12 +265,49 @@ jobs:
echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, compile will trigger cloud_p0" && exit
fi
set -x
trigger_or_skip_build \
"${{ steps.changes.outputs.changed_cloud_p0 }}" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"cloud_p0" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" ]]; then
echo "PR target branch in (master), need run cloud_p0"
trigger_or_skip_build \
"${{ steps.changes.outputs.changed_cloud_p0 }}" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"cloud_p0" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
else
echo "PR target branch not in (master), skip run cloud_p0"
trigger_or_skip_build \
"false" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"cloud_p0" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
fi
- name: "Trigger or Skip cloud_p1"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["cloud_p1", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
source ./regression-test/pipeline/common/teamcity-utils.sh
if [[ ${{ steps.parse.outputs.COMMENT_TRIGGER_TYPE }} == "buildall" ]]; then
echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, compile will trigger cloud_p1" && exit
fi
set -x
if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" ]]; then
echo "PR target branch in (master), need run cloud_p1"
trigger_or_skip_build \
"${{ steps.changes.outputs.changed_cloud_p1 }}" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"cloud_p1" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
else
echo "PR target branch not in (master), skip run cloud_p1"
trigger_or_skip_build \
"false" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"cloud_p1" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
fi
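Both steps above delegate to trigger_or_skip_build from regression-test/pipeline/common/teamcity-utils.sh, whose definition is not part of this diff. The sketch below is a hypothetical simplification inferred only from the call sites, just to illustrate the gating: passing "false" as the first argument (non-master target branch, or no relevant file change) means the pipeline is skipped.

```bash
# Hypothetical simplification, illustration only -- the real function lives in
# regression-test/pipeline/common/teamcity-utils.sh and talks to TeamCity.
trigger_or_skip_build_sketch() {
    local changed="$1" pr_num="$2" commit_id="$3" pipeline="$4" repeat="$5"
    if [[ "${changed}" == "true" ]]; then
        echo "trigger ${pipeline} for PR ${pr_num} at ${commit_id} (x${repeat:-1})"
    else
        echo "skip ${pipeline} for PR ${pr_num} at ${commit_id}"
    fi
}
```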
- name: "Trigger or Skip arm"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["arm", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
31 changes: 31 additions & 0 deletions regression-test/pipeline/cloud_p1/conf/be_custom.conf
@@ -0,0 +1,31 @@
streaming_load_rpc_max_alive_time_sec = 72000
quick_cooldown = true
disable_stream_load_2pc=false
enable_vectorized_alter_table = true
enable_new_scan_node = true
push_worker_count_high_priority = 2
streaming_load_max_mb = 107374182400
clear_file_cache=true
enable_file_cache=true
mem_limit=50%
#disable_storage_page_cache = true
enable_file_cache_query_limit=true
file_cache_max_file_segment_size=1048576
s3_write_buffer_whole_size=52428800
enable_vertical_compaction=true
fuzzy_vertical_compaction=true
vacuum_stale_rowsets_interval_seconds=60
tablet_rowset_stale_sweep_time_sec=300
user_files_secure_path=/
enable_file_cache_as_load_buffer=true
enable_merge_on_write_correctness_check=true
enable_debug_points=true
prioritize_query_perf_in_compaction = true
cumulative_compaction_min_deltas = 5
#p0 parameter
meta_service_endpoint = 127.0.0.1:5000
cloud_unique_id = cloud_unique_id_compute_node0
meta_service_use_load_balancer = false
enable_file_cache = true
file_cache_path = [{"path":"/data/doris_cloud/file_cache","total_size":104857600,"query_limit":104857600}]
tmp_file_dirs = [{"path":"/data/doris_cloud/tmp","max_cache_bytes":104857600,"max_upload_bytes":104857600}]
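Note that file_cache_path and tmp_file_dirs take JSON arrays, unlike the flat key = value entries above. A quick pre-start sanity check one could run against this file (an assumption, not part of the commit; requires jq):

```bash
conf=regression-test/pipeline/cloud_p1/conf/be_custom.conf
# Check that every JSON-valued key in the BE config parses as a JSON array.
grep -E '^(file_cache_path|tmp_file_dirs)' "${conf}" \
    | sed 's/^[^=]*= *//' \
    | while read -r value; do
        if echo "${value}" | jq -e 'type == "array"' >/dev/null; then
            echo "ok: ${value}"
        else
            echo "bad JSON: ${value}"
        fi
    done
```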
35 changes: 35 additions & 0 deletions regression-test/pipeline/cloud_p1/conf/fe_custom.conf
@@ -0,0 +1,35 @@
stream_load_default_timeout_second = 72000
replication_num_forced_in_cloud_mode = true
ignore_unsupported_properties_in_cloud_mode = true
enable_array_type = true
tablet_stat_update_interval_second = 10
catalog_trash_expire_second = 600
cloud_delete_loaded_internal_stage_files = true
merge_on_write_forced_to_false = true
enable_ssl = true
light_schema_change_force_to_true = true
enable_mtmv = true
remote_fragment_exec_timeout_ms=60000
dynamic_partition_check_interval_seconds=10
use_fuzzy_session_variable=true

enable_cloud_snapshot_version = true
enable_auto_collect_statistics = false

forbid_function_stmt = false
forbid_insecurity_stmt = false

enable_debug_points = true

disable_datev1=false

disable_decimalv2=false
max_query_profile_num=1000

statistics_sql_mem_limit_in_bytes=21474836480
cpu_resource_limit_per_analyze_task=-1

priority_networks=127.0.0.1/24
cloud_http_port=18030
meta_service_endpoint=127.0.0.1:5000
cloud_unique_id=cloud_unique_id_sql_server00
1 change: 1 addition & 0 deletions regression-test/pipeline/cloud_p1/conf/regression-conf-custom.groovy
@@ -0,0 +1 @@
testGroups = "p1"
6 changes: 6 additions & 0 deletions regression-test/pipeline/cloud_p1/conf/session_variables.sql
@@ -0,0 +1,6 @@
-- set these session variables before running cloud p1 regression
set global insert_visible_timeout_ms=60000;
set global enable_auto_analyze=false;
set global enable_audit_plugin=true;
set global enable_memtable_on_sink_node=false;
set global enable_two_phase_read_opt = false;
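These are global variables, so they only need to be applied once against the FE before the run starts. One way to do that (an assumption, not shown in this commit; 9030 is the default FE query port and root the default user):

```bash
mysql -h 127.0.0.1 -P 9030 -u root \
    < regression-test/pipeline/cloud_p1/conf/session_variables.sql
```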
11 changes: 6 additions & 5 deletions regression-test/pipeline/common/doris-utils.sh
@@ -456,7 +456,8 @@ archive_doris_logs() {
archive_content="${archive_content} session_variables"
fi
if [[ -d "${DORIS_HOME}"/ms ]]; then
cp -rf /var/log/foundationdb "${DORIS_HOME}"/foundationdb/log
mkdir -p "${DORIS_HOME}"/foundationdb/log
cp -rf /var/log/foundationdb/* "${DORIS_HOME}"/foundationdb/log/
archive_content="${archive_content} ms/conf ms/log foundationdb/log"
fi
if [[ -d "${DORIS_HOME}"/recycler ]]; then
@@ -565,11 +566,11 @@ function create_warehouse() {
\"user_id\":\"user-id\",
\"obj_info\": {
\"provider\": \"COS\",
\"region\": \"ap-beijing\",
\"bucket\": \"doris-build-1308700295\",
\"region\": \"ap-hongkong\",
\"bucket\": \"doris-build-hk-1308700295\",
\"prefix\": \"ci\",
\"endpoint\": \"cos.ap-beijing.myqcloud.com\",
\"external_endpoint\": \"cos.ap-beijing.myqcloud.com\",
\"endpoint\": \"cos.ap-hongkong.myqcloud.com\",
\"external_endpoint\": \"cos.ap-hongkong.myqcloud.com\",
\"ak\": \"${COS_ak}\",
\"sk\": \"${COS_sk}\"
}
4 changes: 4 additions & 0 deletions regression-test/pipeline/common/github-utils.sh
@@ -316,6 +316,10 @@ file_changed_cloud_p0() {
echo "return no need" && return 1
}

file_changed_cloud_p1() {
file_changed_cloud_p0
}
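cloud_p1 exercises the same sources as cloud_p0, so the new helper simply delegates and inherits both the path filters and the exit-status convention (0 means the PR touched relevant files, 1 means "no need", as in file_changed_cloud_p0 above). A usage sketch, assuming all_files has already been populated by _get_pr_changed_files as the workflow does:

```bash
if file_changed_cloud_p1; then
    echo "changed_cloud_p1=true"
else
    echo "changed_cloud_p1=false"
fi
```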

file_changed_regression_p0() {
local all_files
all_files=$(cat all_files)
2 changes: 2 additions & 0 deletions regression-test/pipeline/common/teamcity-utils.sh
@@ -36,6 +36,7 @@ comment_to_pipeline=(
['arm']='Doris_ArmPipeline_P0Regression'
['performance']='Doris_DorisPerformance_Performance'
['cloud_p0']='Doris_DorisRegression_CloudP0'
['cloud_p1']='Doris_DorisCloudRegression_CloudP1'
)

# pipeline names to be triggered by comments on GitHub
@@ -56,6 +57,7 @@ conment_to_context=(
['arm']='P0 Regression (ARM pipeline)'
['performance']='performance (Doris Performance)'
['cloud_p0']='cloud_p0 (Doris Cloud Regression)'
['cloud_p1']='cloud_p1 (Doris Cloud Regression)'
)
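Together, the two new entries map the comment trigger word to its TeamCity build configuration and to the GitHub commit-status context under which results are reported. A lookup sketch, run from a checkout of the repo (note that conment_to_context is the array's actual spelling in teamcity-utils.sh):

```bash
source regression-test/pipeline/common/teamcity-utils.sh
echo "${comment_to_pipeline[cloud_p1]}"   # Doris_DorisCloudRegression_CloudP1
echo "${conment_to_context[cloud_p1]}"    # cloud_p1 (Doris Cloud Regression)
```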

get_commit_id_of_build() {
