[ci](vault) add vault_p0 pipeline (#45964)
hello-stephen authored Jan 2, 2025
1 parent 3f9d833 commit 215c0f6
Showing 15 changed files with 693 additions and 1 deletion.
34 changes: 33 additions & 1 deletion .github/workflows/comment-to-trigger-teamcity.yml
@@ -56,6 +56,7 @@ jobs:
"${COMMENT_BODY}" == *'run external'* ||
"${COMMENT_BODY}" == *'run cloud_p0'* ||
"${COMMENT_BODY}" == *'run cloud_p1'* ||
"${COMMENT_BODY}" == *'run vault_p0'* ||
"${COMMENT_BODY}" == *'run arm'* ||
"${COMMENT_BODY}" == *'run performance'* ]]; then
echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT"
@@ -86,7 +87,7 @@ jobs:
echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT"
echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT"
reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|arm|performance)( [1-9]*[0-9]+)*"
reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|vault_p0|arm|performance)( [1-9]*[0-9]+)*"
COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')"
COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')"
echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a "$GITHUB_OUTPUT"
@@ -139,8 +140,10 @@ jobs:
fi
if file_changed_cloud_p0; then
echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_vault_p0=true" | tee -a "$GITHUB_OUTPUT"
else
echo "changed_cloud_p0=false" | tee -a "$GITHUB_OUTPUT"
echo "changed_vault_p0=false" | tee -a "$GITHUB_OUTPUT"
fi
if file_changed_cloud_p1; then
echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
@@ -159,6 +162,7 @@
echo "changed_performance=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT"
echo "changed_vault_p0=true" | tee -a "$GITHUB_OUTPUT"
fi
# - name: "Setup tmate session"
@@ -323,6 +327,33 @@ jobs:
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
fi
- name: "Trigger or Skip vault_p0"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["vault_p0", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
source ./regression-test/pipeline/common/teamcity-utils.sh
if [[ ${{ steps.parse.outputs.COMMENT_TRIGGER_TYPE }} == "buildall" ]]; then
echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, compile will trigger vault_p0" && exit
fi
set -x
if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" ||
"${{ steps.parse.outputs.TARGET_BRANCH }}" == "'branch-3.0'" ]]; then
echo "PR target branch is in (master, branch-3.0), need run vault_p0"
trigger_or_skip_build \
"${{ steps.changes.outputs.changed_vault_p0 }}" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"vault_p0" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
else
echo "PR target branch is not in (master, branch-3.0), skip run vault_p0"
trigger_or_skip_build \
"false" \
"${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \
"vault_p0" \
"${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}"
fi
- name: "Trigger or Skip cloud_p1"
if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["cloud_p1", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }}
run: |
@@ -402,3 +433,4 @@ jobs:
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloud_p0
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloud_p1
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloudut
skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" vault_p0
3 changes: 3 additions & 0 deletions regression-test/pipeline/cloud_p0/README.md
@@ -0,0 +1,3 @@
## CLOUD P0 CI Pipeline

This pipeline deploys Doris in cloud mode with S3 storage on a single machine and runs P0 test cases.
38 changes: 38 additions & 0 deletions regression-test/pipeline/common/doris-utils.sh
@@ -477,6 +477,16 @@ set_session_variable() {
fi
}

set_default_storage_vault() {
query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port)
cl="mysql -h127.0.0.1 -P${query_port} -uroot "
if ${cl} -e"set built_in_storage_vault as default storage vault;"; then
echo "INFO: set built_in_storage_vault as default storage vault;"
else
echo "ERROR: set built_in_storage_vault as default storage vault;" && return 1
fi
}

function reset_doris_session_variables() {
# reset all session variables to default
if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi
@@ -744,6 +754,34 @@ function create_warehouse() {
fi
}

function create_warehouse_vault() {
if [[ -z ${oss_ak} || -z ${oss_sk} ]]; then
echo "ERROR: env oss_ak and oss_sk are required." && return 1
fi

if curl "127.0.0.1:5000/MetaService/http/create_instance?token=greedisgood9999" -d "{
\"instance_id\": \"cloud_instance_0\",
\"name\":\"cloud_instance_0\",
\"user_id\":\"user-id\",
\"vault\": {
\"obj_info\": {
\"provider\": \"OSS\",
\"region\": \"oss-cn-hongkong\",
\"bucket\": \"doris-community-test\",
\"prefix\": \"cloud_regression_vault\",
\"endpoint\": \"oss-cn-hongkong-internal.aliyuncs.com\",
\"external_endpoint\": \"oss-cn-hongkong-internal.aliyuncs.com\",
\"ak\": \"${oss_ak}\",
\"sk\": \"${oss_sk}\"
}
}
}"; then
echo
else
return 1
fi
}

function warehouse_add_fe() {
local ret
if curl "127.0.0.1:5000/MetaService/http/add_cluster?token=greedisgood9999" -d "{
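
A hedged sketch of how the two new helpers above fit into deployment; the ordering and surrounding steps are assumptions, and oss_ak, oss_sk, and teamcity_build_checkoutDir are expected from the pipeline environment:

# Hypothetical deployment order; assumes the meta service is already
# listening on 127.0.0.1:5000.
source regression-test/pipeline/common/doris-utils.sh
export DORIS_HOME="${teamcity_build_checkoutDir}/output"
create_warehouse_vault       # register cloud_instance_0 with an OSS storage vault
# ... start FE/BE and attach them to the instance ...
set_default_storage_vault    # set built_in_storage_vault as the default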
3 changes: 3 additions & 0 deletions regression-test/pipeline/common/teamcity-utils.sh
@@ -36,6 +36,7 @@ comment_to_pipeline=(
['performance']='Doris_DorisPerformance_Performance'
['cloud_p0']='Doris_DorisRegression_CloudP0'
['cloud_p1']='Doris_DorisCloudRegression_CloudP1'
['vault_p0']='Doris_DorisCloudRegression_VaultP0'
)

# names of the pipelines to be triggered by comments on GitHub
@@ -56,6 +57,7 @@ conment_to_context=(
['performance']='performance (Doris Performance)'
['cloud_p0']='cloud_p0 (Doris Cloud Regression)'
['cloud_p1']='cloud_p1 (Doris Cloud Regression)'
['vault_p0']='vault_p0 (Doris Cloud Regression)'
)

get_commit_id_of_build() {
@@ -280,6 +282,7 @@ trigger_or_skip_build() {
skip_build "${COMMIT_ID_FROM_TRIGGER}" "external"
skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p0"
skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p1"
skip_build "${COMMIT_ID_FROM_TRIGGER}" "vault_p0"
fi
fi
}
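
For illustration, both maps are keyed by the comment trigger type; a minimal lookup sketch of the new vault_p0 entries (the echoes are illustrative, the real call sites live elsewhere in this file):

# The arrays are associative, so source the file that declares them.
source regression-test/pipeline/common/teamcity-utils.sh
echo "${comment_to_pipeline[vault_p0]}"  # Doris_DorisCloudRegression_VaultP0
echo "${conment_to_context[vault_p0]}"   # vault_p0 (Doris Cloud Regression)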
5 changes: 5 additions & 0 deletions regression-test/pipeline/vault_p0/README.md
@@ -0,0 +1,5 @@
## Vault P0 CI Pipeline

This pipeline deploys Doris in cloud mode with an S3 storage vault on a single machine and runs the cases in `regression-test/suites/vault_p0/`.

The test cases rely on an HDFS Docker container, which is set up with Docker Compose.
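
A minimal sketch of that container setup, assuming a docker-compose.yaml in this directory that exposes the HDFS NameNode on port 8020 (the file name and port mapping are assumptions; 8020 matches hdfsFs in the groovy conf below):

# Hypothetical: bring up the HDFS container used by the vault_p0 cases,
# then check that the NameNode RPC port is reachable.
cd regression-test/pipeline/vault_p0
docker-compose up -d
nc -z 127.0.0.1 8020 && echo "HDFS NameNode reachable on 8020"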
33 changes: 33 additions & 0 deletions regression-test/pipeline/vault_p0/clean.sh
@@ -0,0 +1,33 @@
#!/usr/bin/env bash

########################### Teamcity Build Step: Command Line #######################
: <<EOF
#!/bin/bash
export PATH=/usr/local/software/apache-maven-3.6.3/bin:${PATH}
if [[ -f "${teamcity_build_checkoutDir:-}"/regression-test/pipeline/vault_p0/clean.sh ]]; then
cd "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/
bash -x clean.sh
else
echo "Build Step file missing: regression-test/pipeline/vault_p0/clean.sh" && exit 1
fi
EOF
############################# clean.sh content ########################################
# shellcheck source=/dev/null
# stop_doris, clean_fdb
source "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh

echo "#### Check env"
if [[ -z "${teamcity_build_checkoutDir}" ]]; then echo "ERROR: env teamcity_build_checkoutDir not set" && exit 1; fi

# shellcheck source=/dev/null
source "$(bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'get')"
if ${skip_pipeline:=false}; then echo "INFO: skip build pipeline" && exit 0; else echo "INFO: no skip"; fi

echo "#### stop doris and clean fdb ####"
DORIS_HOME="${teamcity_build_checkoutDir}/output"
export DORIS_HOME
stop_doris
clean_fdb "cloud_instance_0"

echo "#### docker-compose down ####"
docker-compose down
41 changes: 41 additions & 0 deletions regression-test/pipeline/vault_p0/conf/be_custom.conf
@@ -0,0 +1,41 @@
streaming_load_rpc_max_alive_time_sec = 72000
quick_cooldown = true
disable_stream_load_2pc=false
enable_vectorized_alter_table = true
enable_new_scan_node = true
push_worker_count_high_priority = 2
streaming_load_max_mb = 107374182400
clear_file_cache=true
enable_file_cache=true
#disable_storage_page_cache = true
enable_file_cache_query_limit=true
file_cache_max_file_segment_size=1048576
s3_write_buffer_whole_size=52428800
enable_vertical_compaction=true
fuzzy_vertical_compaction=true
vacuum_stale_rowsets_interval_seconds=60
tablet_rowset_stale_sweep_time_sec=300
user_files_secure_path=/
enable_file_cache_as_load_buffer=true
enable_merge_on_write_correctness_check=true
enable_debug_points=true
prioritize_query_perf_in_compaction = true
cumulative_compaction_min_deltas = 5
#p0 parameter
meta_service_endpoint = 127.0.0.1:5000
cloud_unique_id = cloud_unique_id_compute_node0
meta_service_use_load_balancer = false
enable_file_cache = true
file_cache_path = [{"path":"/data/doris_cloud/file_cache","total_size":104857600,"query_limit":104857600}]
tmp_file_dirs = [{"path":"/data/doris_cloud/tmp","max_cache_bytes":104857600,"max_upload_bytes":104857600}]
thrift_rpc_timeout_ms = 360000
save_load_error_log_to_s3 = true
enable_stream_load_record = true
stream_load_record_batch_size = 500
webserver_num_workers = 128
enable_new_tablet_do_compaction = true
arrow_flight_sql_port = 8181
pipeline_task_leakage_detect_period_sec=1
crash_in_memory_tracker_inaccurate = true
enable_table_size_correctness_check=true
enable_brpc_connection_check=true
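
One note on the cache settings above: file_cache_path and tmp_file_dirs point at /data/doris_cloud, and those directories may need to exist before BE starts. A minimal sketch of that pre-start step (whether the deploy scripts already create them is an assumption):

# Hypothetical pre-start step: create the directories referenced by
# file_cache_path and tmp_file_dirs in be_custom.conf.
mkdir -p /data/doris_cloud/file_cache /data/doris_cloud/tmp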
43 changes: 43 additions & 0 deletions regression-test/pipeline/vault_p0/conf/fe_custom.conf
@@ -0,0 +1,43 @@
stream_load_default_timeout_second = 72000
replication_num_forced_in_cloud_mode = true
ignore_unsupported_properties_in_cloud_mode = true
enable_array_type = true
tablet_stat_update_interval_second = 10
catalog_trash_expire_second = 600
cloud_delete_loaded_internal_stage_files = true
merge_on_write_forced_to_false = true
enable_ssl = true
light_schema_change_force_to_true = true
enable_mtmv = true
remote_fragment_exec_timeout_ms=60000
dynamic_partition_check_interval_seconds=10
use_fuzzy_session_variable=true

enable_cloud_snapshot_version = true
enable_auto_collect_statistics = false

forbid_function_stmt = false
forbid_insecurity_stmt = false

enable_debug_points = true

disable_datev1=false

disable_decimalv2=false
max_query_profile_num=1000

statistics_sql_mem_limit_in_bytes=21474836480
cpu_resource_limit_per_analyze_task=-1

arrow_flight_sql_port = 8081

priority_networks=127.0.0.1/24
cloud_http_port=18030
meta_service_endpoint=127.0.0.1:5000
cloud_unique_id=cloud_unique_id_sql_server00
# for case test_build_mtmv.groovy
enable_job_schedule_second_for_test=true
enable_light_index_change=false

workload_sched_policy_interval_ms = 1000
enable_advance_next_id = true
2 changes: 2 additions & 0 deletions regression-test/pipeline/vault_p0/conf/ms_custom.conf
@@ -0,0 +1,2 @@
# below lines will be appended to the default doris_cloud.conf when deploying meta service
meta_schema_value_version = 1
2 changes: 2 additions & 0 deletions regression-test/pipeline/vault_p0/conf/recycler_custom.conf
@@ -0,0 +1,2 @@
# below lines will be appended to the default doris_cloud.conf when deploying recycler
brpc_listen_port = 6000
36 changes: 36 additions & 0 deletions regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy
@@ -0,0 +1,36 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

testDirectories = "vault_p0"
max_failure_num = 10

jdbcUrl = "jdbc:mysql://127.0.0.1:9030/?useLocalSessionState=true&allowLoadLocalInfile=true&zeroDateTimeBehavior=round"
targetJdbcUrl = "jdbc:mysql://127.0.0.1:9030/?useLocalSessionState=true&allowLoadLocalInfile=true&zeroDateTimeBehavior=round"

// for vault cases: keep consistent with the configuration in the create_warehouse_vault function in doris-utils.sh
instanceId="cloud_instance_0"
multiClusterInstanceId="cloud_instance_0"

hdfsFs = "hdfs://127.0.0.1:8020"
hdfsUser = "root"
hdfsPasswd = ""

extHiveHmsHost = "127.0.0.1"
extHiveHmsPort = 7004
extHdfsPort = 8020
extHiveServerPort= 7001
extHiveHmsUser = "root"
1 change: 1 addition & 0 deletions
@@ -0,0 +1 @@
-- set these session variables before running the cloud p0 regression
