diff --git a/.github/workflows/comment-to-trigger-teamcity.yml b/.github/workflows/comment-to-trigger-teamcity.yml index e99dd1b4d29463..7872c216d7c3d0 100644 --- a/.github/workflows/comment-to-trigger-teamcity.yml +++ b/.github/workflows/comment-to-trigger-teamcity.yml @@ -56,6 +56,7 @@ jobs: "${COMMENT_BODY}" == *'run external'* || "${COMMENT_BODY}" == *'run cloud_p0'* || "${COMMENT_BODY}" == *'run cloud_p1'* || + "${COMMENT_BODY}" == *'run vault_p0'* || "${COMMENT_BODY}" == *'run arm'* || "${COMMENT_BODY}" == *'run performance'* ]]; then echo "comment_trigger=true" | tee -a "$GITHUB_OUTPUT" @@ -86,7 +87,7 @@ jobs: echo "TARGET_BRANCH='${TARGET_BRANCH}'" | tee -a "$GITHUB_OUTPUT" echo "COMMENT_BODY='${COMMENT_BODY}'" | tee -a "$GITHUB_OUTPUT" - reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|arm|performance)( [1-9]*[0-9]+)*" + reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|vault_p0|arm|performance)( [1-9]*[0-9]+)*" COMMENT_TRIGGER_TYPE="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p | sed 's/\r//g')" COMMENT_REPEAT_TIMES="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p | sed 's/\r//g')" echo "COMMENT_TRIGGER_TYPE=${COMMENT_TRIGGER_TYPE}" | tee -a "$GITHUB_OUTPUT" @@ -139,8 +140,10 @@ jobs: fi if file_changed_cloud_p0; then echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT" + echo "changed_vault_p0=true" | tee -a "$GITHUB_OUTPUT" else echo "changed_cloud_p0=false" | tee -a "$GITHUB_OUTPUT" + echo "changed_vault_p0=false" | tee -a "$GITHUB_OUTPUT" fi if file_changed_cloud_p1; then echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT" @@ -159,6 +162,7 @@ jobs: echo "changed_performance=true" | tee -a "$GITHUB_OUTPUT" echo "changed_cloud_p0=true" | tee -a "$GITHUB_OUTPUT" echo "changed_cloud_p1=true" | tee -a "$GITHUB_OUTPUT" + echo "changed_vault_p0=true" | tee -a "$GITHUB_OUTPUT" fi # - name: "Setup tmate session" @@ -323,6 +327,33 @@ jobs: "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}" fi + - name: "Trigger or Skip vault_p0" + if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["vault_p0", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }} + run: | + source ./regression-test/pipeline/common/teamcity-utils.sh + if [[ ${{ steps.parse.outputs.COMMENT_TRIGGER_TYPE }} == "buildall" ]]; then + echo "COMMENT_TRIGGER_TYPE is buildall, trigger compile is enough, compile will trigger vault_p0" && exit + fi + set -x + if [[ "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'master'" || + "${{ steps.parse.outputs.TARGET_BRANCH }}" == "'branch-3.0'" ]]; then + echo "PR target branch is in (master, branch-3.0), need run vault_p0" + trigger_or_skip_build \ + "${{ steps.changes.outputs.changed_vault_p0 }}" \ + "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \ + "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \ + "vault_p0" \ + "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}" + else + echo "PR target branch is not in (master, branch-3.0), skip run vault_p0" + trigger_or_skip_build \ + "false" \ + "${{ steps.parse.outputs.PULL_REQUEST_NUM }}" \ + "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" \ + "vault_p0" \ + "${{ steps.parse.outputs.COMMENT_REPEAT_TIMES }}" + fi + - name: "Trigger or Skip cloud_p1" if: ${{ fromJSON(steps.parse.outputs.comment_trigger) && contains(fromJSON('["cloud_p1", "buildall"]'), steps.parse.outputs.COMMENT_TRIGGER_TYPE) }} run: | @@ -402,3 +433,4 @@ jobs: skip_build 
"${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloud_p0 skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloud_p1 skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" cloudut + skip_build "${{ steps.parse.outputs.COMMIT_ID_FROM_TRIGGER }}" vault_p0 diff --git a/regression-test/pipeline/cloud_p0/README.md b/regression-test/pipeline/cloud_p0/README.md new file mode 100644 index 00000000000000..d5661794b3d008 --- /dev/null +++ b/regression-test/pipeline/cloud_p0/README.md @@ -0,0 +1,3 @@ +## CLOUD P0 CI Pipeline + +This pipeline deploys Doris in cloud mode with S3 storage on a single machine and runs P0 test cases. \ No newline at end of file diff --git a/regression-test/pipeline/common/doris-utils.sh b/regression-test/pipeline/common/doris-utils.sh index 5f549db3c805f2..27f15fd192f75c 100644 --- a/regression-test/pipeline/common/doris-utils.sh +++ b/regression-test/pipeline/common/doris-utils.sh @@ -477,6 +477,16 @@ set_session_variable() { fi } +set_default_storage_vault() { + query_port=$(get_doris_conf_value "${DORIS_HOME}"/fe/conf/fe.conf query_port) + cl="mysql -h127.0.0.1 -P${query_port} -uroot " + if ${cl} -e"set built_in_storage_vault as default storage vault;"; then + echo "INFO: set built_in_storage_vault as default storage vault;" + else + echo "ERROR: set built_in_storage_vault as default storage vault;" && return 1 + fi +} + function reset_doris_session_variables() { # reset all session variables to default if [[ ! -d "${DORIS_HOME:-}" ]]; then return 1; fi @@ -744,6 +754,34 @@ function create_warehouse() { fi } +function create_warehouse_vault() { + if [[ -z ${oss_ak} || -z ${oss_sk} ]]; then + echo "ERROR: env oss_ak and oss_sk are required." && return 1 + fi + + if curl "127.0.0.1:5000/MetaService/http/create_instance?token=greedisgood9999" -d "{ + \"instance_id\": \"cloud_instance_0\", + \"name\":\"cloud_instance_0\", + \"user_id\":\"user-id\", + \"vault\": { + \"obj_info\": { + \"provider\": \"OSS\", + \"region\": \"oss-cn-hongkong\", + \"bucket\": \"doris-community-test\", + \"prefix\": \"cloud_regression_vault\", + \"endpoint\": \"oss-cn-hongkong-internal.aliyuncs.com\", + \"external_endpoint\": \"oss-cn-hongkong-internal.aliyuncs.com\", + \"ak\": \"${oss_ak}\", + \"sk\": \"${oss_sk}\" + } + } + }"; then + echo + else + return 1 + fi +} + function warehouse_add_fe() { local ret if curl "127.0.0.1:5000/MetaService/http/add_cluster?token=greedisgood9999" -d "{ diff --git a/regression-test/pipeline/common/teamcity-utils.sh b/regression-test/pipeline/common/teamcity-utils.sh index 0b9004a383ba4b..94d04152ba5f4f 100644 --- a/regression-test/pipeline/common/teamcity-utils.sh +++ b/regression-test/pipeline/common/teamcity-utils.sh @@ -36,6 +36,7 @@ comment_to_pipeline=( ['performance']='Doris_DorisPerformance_Performance' ['cloud_p0']='Doris_DorisRegression_CloudP0' ['cloud_p1']='Doris_DorisCloudRegression_CloudP1' + ['vault_p0']='Doris_DorisCloudRegression_VaultP0' ) # github中评论的要触发的流水线名字 @@ -56,6 +57,7 @@ conment_to_context=( ['performance']='performance (Doris Performance)' ['cloud_p0']='cloud_p0 (Doris Cloud Regression)' ['cloud_p1']='cloud_p1 (Doris Cloud Regression)' + ['vault_p0']='vault_p0 (Doris Cloud Regression)' ) get_commit_id_of_build() { @@ -280,6 +282,7 @@ trigger_or_skip_build() { skip_build "${COMMIT_ID_FROM_TRIGGER}" "external" skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p0" skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p1" + skip_build "${COMMIT_ID_FROM_TRIGGER}" "vault_p0" fi fi } diff --git 
diff --git a/regression-test/pipeline/common/teamcity-utils.sh b/regression-test/pipeline/common/teamcity-utils.sh index 0b9004a383ba4b..94d04152ba5f4f 100644 --- a/regression-test/pipeline/common/teamcity-utils.sh +++ b/regression-test/pipeline/common/teamcity-utils.sh @@ -36,6 +36,7 @@ comment_to_pipeline=( ['performance']='Doris_DorisPerformance_Performance' ['cloud_p0']='Doris_DorisRegression_CloudP0' ['cloud_p1']='Doris_DorisCloudRegression_CloudP1' + ['vault_p0']='Doris_DorisCloudRegression_VaultP0' ) # names of the pipelines to be triggered by comments on GitHub @@ -56,6 +57,7 @@ conment_to_context=( ['performance']='performance (Doris Performance)' ['cloud_p0']='cloud_p0 (Doris Cloud Regression)' ['cloud_p1']='cloud_p1 (Doris Cloud Regression)' + ['vault_p0']='vault_p0 (Doris Cloud Regression)' ) get_commit_id_of_build() { @@ -280,6 +282,7 @@ trigger_or_skip_build() { skip_build "${COMMIT_ID_FROM_TRIGGER}" "external" skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p0" skip_build "${COMMIT_ID_FROM_TRIGGER}" "cloud_p1" + skip_build "${COMMIT_ID_FROM_TRIGGER}" "vault_p0" fi fi }
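Taken together with the workflow change above, the path from a PR comment to a TeamCity build can be sanity-checked locally. A minimal sketch reusing the workflow's regex and field extraction with a made-up comment body (it assumes the two lookup tables are declared associative in the full file); not part of the patch:

```bash
#!/usr/bin/env bash
source regression-test/pipeline/common/teamcity-utils.sh
# Same pattern and extraction as the workflow's parse step.
reg="run (buildall|compile|p0|p1|feut|beut|cloudut|external|clickbench|cloud_p0|cloud_p1|vault_p0|arm|performance)( [1-9]*[0-9]+)*"
COMMENT_BODY="run vault_p0 3" # hypothetical PR comment
trigger_type="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $2}' | sed -n 1p)"
repeat_times="$(echo -e "${COMMENT_BODY}" | xargs | grep -E "${reg}" | awk -F' ' '{print $3}' | sed -n 1p)"
echo "${trigger_type} x${repeat_times}"        # -> vault_p0 x3
echo "${comment_to_pipeline[${trigger_type}]}" # -> Doris_DorisCloudRegression_VaultP0
echo "${conment_to_context[${trigger_type}]}"  # -> vault_p0 (Doris Cloud Regression)
```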
check if need backup doris logs" +if [[ ${exit_flag} != "0" ]]; then + stop_doris + print_doris_fe_log + print_doris_be_log + if file_name=$(archive_doris_logs "${pr_num_from_trigger}_${commit_id_from_trigger}_$(date +%Y%m%d%H%M%S)_doris_logs.tar.gz"); then + upload_doris_log_to_oss "${file_name}" + fi +fi + +exit "${exit_flag}" +##################################################################################### diff --git a/regression-test/pipeline/vault_p0/prepare.sh b/regression-test/pipeline/vault_p0/prepare.sh new file mode 100644 index 00000000000000..eeb40e3014dd7a --- /dev/null +++ b/regression-test/pipeline/vault_p0/prepare.sh @@ -0,0 +1,176 @@ +#!/usr/bin/env bash + +########################### Teamcity Build Step: Command Line ####################### +: < 流水线开始跑,这个时间段中如果有新commit, +这时候流水线 checkout 出来的 commit 就不是触发时的传过来的 commit 了, +这种情况不需要跑,预期 pr owner 会重新触发。" + echo -e "ERROR: PR(${pr_num_from_trigger}), + the commit_id_from_checkout + ${commit_id_from_checkout} + not equail to the commit_id_from_trigger + ${commit_id_from_trigger} + commit_id_from_trigger is outdate" + exit 1 +fi + +# shellcheck source=/dev/null +source "$(bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'get')" +if ${skip_pipeline:=false}; then echo "INFO: skip build pipline" && exit 0; else echo "INFO: no skip"; fi +if [[ "${target_branch}" == "master" ]]; then + echo "INFO: PR target branch ${target_branch}" + install_java +else + echo "WARNING: PR target branch ${target_branch} is NOT in (master), skip pipeline." + bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'set' "export skip_pipeline=true" + exit 0 +fi + +# shellcheck source=/dev/null +# _get_pr_changed_files file_changed_performance +source "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh +if _get_pr_changed_files "${pr_num_from_trigger}"; then + if ! file_changed_cloud_p0; then + bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'set' "export skip_pipeline=true" + exit 0 + fi +fi + +echo "#### 2. check if tpch depending files exist" +set -x +if ! [[ -d "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/ && + -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/oss-utils.sh && + -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/doris-utils.sh && + -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/github-utils.sh && + -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh ]]; then + echo "ERROR: depending files missing" && exit 1 +fi + +echo "#### 3. try to kill old doris process" +DORIS_HOME="${teamcity_build_checkoutDir}/output" +export DORIS_HOME +stop_doris +clear_coredump + +echo "#### 4. prepare fundationdb and docker-compose" +install_fdb +clean_fdb "cloud_instance_0" +if ! command -v docker-compose; then + if apt update >/dev/null && apt install -y docker-compose; then + echo "INFO: docker-compose installed" + else + echo "ERROR: docker-compose install failed" && exit 1 + fi +fi + +echo "#### 5. 
check if binary package ready" +merge_pr_to_master_commit() { + local pr_num_from_trigger="$1" + local target_branch="$2" + local master_commit="$3" + echo "INFO: merge pull request into ${target_branch} ${master_commit}" + if [[ -z "${teamcity_build_checkoutDir}" ]]; then + echo "ERROR: env teamcity_build_checkoutDir not set" && return 1 + fi + cd "${teamcity_build_checkoutDir}" || return 1 + git reset --hard + git fetch origin "${target_branch}" + git checkout "${target_branch}" + git reset --hard origin/"${target_branch}" + git checkout "${master_commit}" + returnValue=$? + if [[ ${returnValue} -ne 0 ]]; then + echo "ERROR: checkout ${target_branch} ${master_commit} failed. please rebase to the newest version." + return 1 + fi + git rev-parse HEAD + git config user.email "ci@selectdb.com" + git config user.name "ci" + echo "git fetch origin refs/pull/${pr_num_from_trigger}/head" + git fetch origin "refs/pull/${pr_num_from_trigger}/head" + git merge --no-edit --allow-unrelated-histories FETCH_HEAD + echo "INFO: merge refs/pull/${pr_num_from_trigger}/head into ${target_branch} ${master_commit}" + # CONFLICTS=$(git ls-files -u | wc -l) + if [[ $(git ls-files -u | wc -l) -gt 0 ]]; then + echo "ERROR: merge refs/pull/${pr_num_from_trigger}/head into failed. Aborting" + git merge --abort + return 1 + fi +} +export OSS_DIR="${OSS_DIR:-"oss://opensource-pipeline/compile_result"}" +if ! check_oss_file_exist "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then return 1; fi +if download_oss_file "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz"; then + rm -rf "${teamcity_build_checkoutDir}"/output + tar -I pigz -xf "${pr_num_from_trigger}_${commit_id_from_trigger}.tar.gz" + master_commit_file="master.commit" + if [[ -e output/${master_commit_file} ]]; then + # checkout to master commit and merge this pr, to ensure binary and case are same version + master_commit=$(cat output/"${master_commit_file}") + if merge_pr_to_master_commit "${pr_num_from_trigger}" "${target_branch}" "${master_commit}"; then + echo "INFO: merged done" + if [[ "${teamcity_buildType_id:-}" =~ ^Doris_DorisCloudRegression_CloudP1 ]]; then + echo "INFO: 用cloud_p1/conf覆盖cloud_p0/conf" + if [[ -d "${teamcity_build_checkoutDir:-}"/regression-test/pipeline/cloud_p1/conf ]]; then + cp -rf "${teamcity_build_checkoutDir}"/regression-test/pipeline/cloud_p1/conf/* \ + "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/ + else + echo "ERROR: regression-test/pipeline/cloud_p1/conf not exist" && exit 1 + fi + fi + else + exit 1 + fi + fi +else + exit 1 +fi diff --git a/regression-test/pipeline/vault_p0/run.sh b/regression-test/pipeline/vault_p0/run.sh new file mode 100644 index 00000000000000..d0d0e26e733c77 --- /dev/null +++ b/regression-test/pipeline/vault_p0/run.sh @@ -0,0 +1,167 @@ +#!/usr/bin/env bash + +########################### Teamcity Build Step: Command Line ####################### +: <>"${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy + cp -f "${teamcity_build_checkoutDir}"/regression-test/pipeline/vault_p0/conf/regression-conf-custom.groovy \ + "${teamcity_build_checkoutDir}"/regression-test/conf/ + # # start kafka docker to run case test_rountine_load + # sed -i "s/^CONTAINER_UID=\"doris--\"/CONTAINER_UID=\"doris-external--\"/" "${teamcity_build_checkoutDir}"/docker/thirdparties/custom_settings.env + # if bash "${teamcity_build_checkoutDir}"/docker/thirdparties/run-thirdparties-docker.sh --stop; then echo; fi + # if bash 
"${teamcity_build_checkoutDir}"/docker/thirdparties/run-thirdparties-docker.sh -c kafka; then echo; else echo "ERROR: start kafka docker failed"; fi + # used to set up HDFS docker + docker_compose_hdfs_yaml=' +version: "3" + +services: + namenode: + image: bde2020/hadoop-namenode:2.0.0-hadoop3.2.1-java8 + environment: + - CLUSTER_NAME=test + container_name: hadoop3-namenode + ports: + - "9870:9870" + expose: + - "9870" + healthcheck: + test: [ "CMD", "curl", "http://localhost:9870/" ] + interval: 5s + timeout: 120s + retries: 120 + network_mode: "host" + + datanode: + image: bde2020/hadoop-datanode:2.0.0-hadoop3.2.1-java8 + ports: + - "9864:9864" + container_name: hadoop3-datanode + expose: + - "9864" + healthcheck: + test: [ "CMD", "curl", "http://localhost:9864" ] + interval: 5s + timeout: 60s + retries: 120 + network_mode: "host" +' + if echo "${docker_compose_hdfs_yaml}" >docker-compose.yaml && docker-compose up -d; then echo; else echo "ERROR: start hdfs docker failed"; fi + JAVA_HOME="$(find /usr/lib/jvm -maxdepth 1 -type d -name 'java-8-*' | sed -n '1p')" + export JAVA_HOME + if "${teamcity_build_checkoutDir}"/run-regression-test.sh \ + --teamcity \ + --run \ + --times "${repeat_times_from_trigger:-1}" \ + -parallel 10 \ + -suiteParallel 10 \ + -actionParallel 10 \ + -runNonConcurrent true; then + echo + else + bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'set' "export need_collect_log=true" + # regression 测试跑完后输出的汇总信息,Test 1961 suites, failed 1 suites, fatal 0 scripts, skipped 0 scripts + # 如果 test_suites>0 && failed_suites<=3 && fatal_scripts=0,就把返回状态码改为正常的0,让teamcity根据跑case的情况去判断成功还是失败 + # 这样预期能够快速 mute 不稳定的 case + summary=$( + grep -aoE 'Test ([0-9]+) suites, failed ([0-9]+) suites, fatal ([0-9]+) scripts, skipped ([0-9]+) scripts' \ + "${DORIS_HOME}"/regression-test/log/doris-regression-test.*.log + ) + set -x + test_suites=$(echo "${summary}" | cut -d ' ' -f 2) + failed_suites=$(echo "${summary}" | cut -d ' ' -f 5) + fatal_scripts=$(echo "${summary}" | cut -d ' ' -f 8) + if [[ ${test_suites} -gt 0 && ${failed_suites} -le ${failed_suites_threshold:=100} && ${fatal_scripts} -eq 0 ]]; then + echo "INFO: regression test result meet (test_suites>0 && failed_suites<=${failed_suites_threshold} && fatal_scripts=0)" + else + return 1 + fi + fi +} +export -f run +# 设置超时时间(以分为单位) +timeout_minutes=$((${repeat_times_from_trigger:-1} * ${BUILD_TIMEOUT_MINUTES:-180}))m +timeout "${timeout_minutes}" bash -cx run +exit_flag="$?" +if print_running_pipeline_tasks; then :; fi +# shellcheck source=/dev/null +source "$(cd "${teamcity_build_checkoutDir}" && bash "${teamcity_build_checkoutDir}"/regression-test/pipeline/common/get-or-set-tmp-env.sh 'get')" + +echo "#### 5. 
check if need backup doris logs" +if [[ ${exit_flag} != "0" ]] || ${need_collect_log}; then + check_if_need_gcore "${exit_flag}" + if core_file_name=$(archive_doris_coredump "${pr_num_from_trigger}_${commit_id_from_trigger}_$(date +%Y%m%d%H%M%S)_doris_coredump.tar.gz"); then + reporting_build_problem "coredump" + print_doris_fe_log + print_doris_be_log + fi + stop_doris + if log_file_name=$(archive_doris_logs "${pr_num_from_trigger}_${commit_id_from_trigger}_$(date +%Y%m%d%H%M%S)_doris_logs.tar.gz"); then + if log_info="$(upload_doris_log_to_oss "${log_file_name}")"; then + reporting_messages_error "${log_info##*logs.tar.gz to }" + fi + fi + if core_info="$(upload_doris_log_to_oss "${core_file_name}")"; then reporting_messages_error "${core_info##*coredump.tar.gz to }"; fi +fi + +exit "${exit_flag}"