[Tool] CI env change to ubuntu (#48373)

Signed-off-by: AndyZiYe <yeziyu@starrocks.com>
andyziye 2024-08-22 09:40:49 +08:00 committed by GitHub
parent f721fd4bec
commit 0ebc5cb360
5 changed files with 281 additions and 142 deletions

View File

@@ -211,6 +211,8 @@ jobs:
env:
PR_NUMBER: ${{ github.event.number }}
BRANCH: ${{ github.base_ref }}
IMAGE_CACHE_ID: ${{ needs.thirdparty-info.outputs.ubuntu_image_cache_id }}
LINUX_DISTRO: ubuntu
steps:
- name: BRANCH INFO
id: branch
@@ -225,10 +227,10 @@
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
if [[ "${{ needs.be-checker.outputs.output2 }}" == 'true' ]]; then
export image_cache_id=${{ needs.thirdparty-info.outputs.centos7_image_cache_id }}
export image_cache_id=${IMAGE_CACHE_ID}
export image_tag=$BRANCH-$PR_NUMBER
fi
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module be --branch ${{ steps.branch.outputs.branch }} --repository ${{ github.repository }}
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module be --branch ${{ steps.branch.outputs.branch }} --repository ${{ github.repository }} --linuxdistro ${LINUX_DISTRO}
- name: clean ECI
if: always()
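elastic-ut.sh lives in the internal ci-tool repository, so the semantics of the new --linuxdistro flag are not visible in this diff. A minimal bash sketch of how such a flag might be parsed, assuming centos7 was the previous implicit default:

    #!/usr/bin/env bash
    # Hypothetical parser for the new flag; the real elastic-ut.sh is internal.
    linuxdistro=centos7                 # assumed old default, now passed explicitly
    while [[ $# -gt 0 ]]; do
      case "$1" in
        --linuxdistro) linuxdistro="$2"; shift 2 ;;  # e.g. ubuntu
        *) shift ;;                                  # other options ignored here
      esac
    done
    echo "running UT inside a ${linuxdistro} build image"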
@@ -446,7 +448,7 @@ jobs:
timeout-minutes: 60
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module fe --branch ${{steps.branch.outputs.branch}} --build Release --repository ${{ github.repository }}
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module fe --branch ${{steps.branch.outputs.branch}} --build Release --repository ${{ github.repository }} --linuxdistro ubuntu
- name: Clean ECI
if: always()

View File

@@ -90,7 +90,22 @@ jobs:
thirdparty:
- 'thirdparty/**'
- 'docker/dockerfiles/dev-env/dev-env.Dockerfile'
- name: Prepare info
run: |
mkdir be-path-filter && cd be-path-filter
echo ${{ steps.path-filter.outputs.be }} > src_filter.txt
echo ${{ steps.path-filter.outputs.ut }} > test_filter.txt
echo ${{ steps.path-filter.outputs.thirdparty }} > thirdparty_filter.txt
- name: Upload the BE Filter Info
uses: actions/upload-artifact@v4
with:
name: be-path-filter
path: ./be-path-filter/
retention-days: 3
overwrite: true
clang-format:
runs-on: [self-hosted, light]
needs: be-checker
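Because thirdparty-update now depends only on clang-format (next hunk), it can no longer read needs.be-checker.outputs.* directly; the path-filter results travel as files inside the be-path-filter artifact instead. A minimal local sketch of that round trip, with sample filter values and $GITHUB_OUTPUT faked by a temp file:

    # Writer side: persist each paths-filter output to its own file.
    mkdir -p be-path-filter && cd be-path-filter
    echo "true"  > src_filter.txt
    echo "false" > test_filter.txt
    echo "false" > thirdparty_filter.txt
    # Reader side: re-export every file as a step output named after the file.
    GITHUB_OUTPUT=$(mktemp)
    for f in *_filter.txt; do
      echo "${f%.txt}=$(cat "$f")" >> "$GITHUB_OUTPUT"
    done
    cat "$GITHUB_OUTPUT"   # src_filter=true, test_filter=false, thirdparty_filter=false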
@@ -140,7 +155,7 @@ jobs:
thirdparty-update:
runs-on: [self-hosted, normal]
needs: [ be-checker, clang-format ]
needs: [ clang-format ]
name: Thirdparty Update
continue-on-error: true
strategy:
@@ -156,10 +171,25 @@ jobs:
run: |
rm -rf ${{ github.workspace }}
mkdir -p ${{ github.workspace }}
- name: Download BE Path Filter Artifact
uses: dawidd6/action-download-artifact@v6
with:
name: be-path-filter
path: be-path-filter
run_id: ${{ github.run_id }}
- name: Parsing path-filter file
id: parsing-path-filter
run: |
cd be-path-filter/; ls;
echo "src_filter=`cat src_filter.txt`" >> $GITHUB_OUTPUT
echo "test_filter.txt=`cat test_filter.txt`" >> $GITHUB_OUTPUT
echo "thirdparty_filter=`cat thirdparty_filter.txt`" >> $GITHUB_OUTPUT
- name: Update Image (${{ matrix.linux }})
id: update-image
if: needs.be-checker.outputs.thirdparty_filter == 'true'
if: steps.parsing-path-filter.outputs.thirdparty_filter == 'true'
env:
linux_distro: ${{ matrix.linux }}
run: |
@@ -176,7 +206,7 @@ jobs:
if-no-files-found: ignore
- name: Clean ENV
if: always() && needs.be-checker.outputs.thirdparty_filter == 'true'
if: always() && steps.parsing-path-filter.outputs.thirdparty_filter == 'true'
run: |
cd ci-tool && source lib/init.sh
./bin/elastic-cluster.sh --delete
@@ -218,6 +248,8 @@ jobs:
env:
PR_NUMBER: ${{ github.event.number }}
BRANCH: ${{ github.base_ref }}
IMAGE_CACHE_ID: ${{ needs.thirdparty-info.outputs.ubuntu_image_cache_id }}
LINUX_DISTRO: ubuntu
steps:
- name: INIT ECI & RUN UT
id: run_ut
@@ -225,11 +257,11 @@
timeout-minutes: 90
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
if [[ "${{ needs.thirdparty-info.outputs.centos7_image_cache_id }}" != '' ]]; then
export image_cache_id=${{ needs.thirdparty-info.outputs.centos7_image_cache_id }}
if [[ "${IMAGE_CACHE_ID}" != '' ]]; then
export image_cache_id=${IMAGE_CACHE_ID}
export image_tag=$BRANCH-$PR_NUMBER
fi
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module be --repository ${{ github.repository }} --with-gcov
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module be --repository ${{ github.repository }} --linuxdistro ${LINUX_DISTRO} --with-gcov
- name: clean ECI
if: always()
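The hard-coded centos7 cache id becomes a job-level IMAGE_CACHE_ID env var, and the emptiness check moves from an inlined Actions expression to a plain shell test, which stays well-formed even when the upstream output is unset. The pattern in isolation, with stand-in values:

    IMAGE_CACHE_ID="${IMAGE_CACHE_ID:-}"      # injected by the job's env: block
    if [[ -n "${IMAGE_CACHE_ID}" ]]; then
      export image_cache_id="${IMAGE_CACHE_ID}"
      export image_tag="main-48373"           # $BRANCH-$PR_NUMBER in the workflow
    fi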
@@ -263,6 +295,8 @@ jobs:
env:
PR_NUMBER: ${{ github.event.number }}
BRANCH: ${{ github.base_ref }}
IMAGE_CACHE_ID: ${{ needs.thirdparty-info.outputs.ubuntu_image_cache_id }}
LINUX_DISTRO: ubuntu
steps:
- name: clean
run: |
@@ -282,10 +316,10 @@
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
if [[ "${{ needs.be-checker.outputs.thirdparty_filter }}" == 'true' ]]; then
export image_cache_id=${{ needs.thirdparty-info.outputs.centos7_image_cache_id }}
export image_cache_id=${IMAGE_CACHE_ID}
export image_tag=$BRANCH-$PR_NUMBER
fi
./bin/elastic-build.sh --pr ${PR_NUMBER} --repository ${{ github.repository }} --be --clang-tidy
./bin/elastic-build.sh --pr ${PR_NUMBER} --repository ${{ github.repository }} --linuxdistro ${LINUX_DISTRO} --be --clang-tidy
- name: clean ECI
if: always()
@@ -330,6 +364,23 @@ jobs:
- 'java-extensions/**'
pom:
- '**/pom.xml'
- name: Prepare info
run: |
mkdir fe-path-filter && cd fe-path-filter
echo ${{ steps.path-filter.outputs.fe }} > src_filter.txt
echo ${{ steps.path-filter.outputs.ut }} > test_filter.txt
echo ${{ steps.path-filter.outputs.java }} > java_filter.txt
echo ${{ steps.path-filter.outputs.extension }} > extension_filter.txt
echo ${{ steps.path-filter.outputs.pom }} > pom_filter.txt
- name: Upload the FE Filter Info
uses: actions/upload-artifact@v4
with:
name: fe-path-filter
path: ./fe-path-filter/
retention-days: 3
overwrite: true
fe-codestyle-check:
runs-on: ubuntu-latest
@@ -495,7 +546,7 @@ jobs:
EXTENSION: ${{ needs.fe-codestyle-check.outputs.extension_filter }}
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module fe --branch ${{steps.branch.outputs.branch}} --build Release --repository ${{ github.repository }}
./bin/elastic-ut.sh --pr ${PR_NUMBER} --module fe --branch ${{steps.branch.outputs.branch}} --build Release --repository ${{ github.repository }} --linuxdistro ubuntu
- name: Clean ECI
if: always()
@@ -541,26 +592,73 @@ jobs:
build:
runs-on: [self-hosted, normal]
needs: [be-checker, fe-checker, test-checker, clang-tidy, fe-ut, thirdparty-info]
needs: [test-checker, clang-tidy, fe-ut, thirdparty-info]
name: BUILD
env:
PR_NUMBER: ${{ github.event.number }}
BRANCH: ${{ github.base_ref }}
IMAGE_CACHE_ID: ${{ needs.thirdparty-info.outputs.ubuntu_image_cache_id }}
LINUX_DISTRO: ubuntu
outputs:
build_output_tar: ${{ steps.run_build.outputs.OUTPUT_TAR }}
base_version: ${{ steps.run_build.outputs.BASE_VERSION }}
is_self_build: ${{ steps.run_build.outputs.is_self_build }}
build_nece: ${{ steps.check-necessity.outputs.BUILD_NECE }}
if: >
always() &&
(needs.clang-tidy.result == 'success' && needs.fe-ut.result == 'success') ||
(needs.be-checker.outputs.src_filter != 'true' && needs.fe-checker.outputs.src_filter == 'true' && needs.fe-ut.result == 'success') ||
(needs.fe-checker.outputs.src_filter != 'true' && needs.be-checker.outputs.src_filter == 'true' && needs.clang-tidy.result == 'success') ||
(needs.be-checker.outputs.src_filter != 'true' && needs.fe-checker.outputs.src_filter != 'true' && needs.test-checker.outputs.output1 == 'true')
always() && needs.clang-tidy.result != 'failure' && needs.fe-ut.result != 'failure'
steps:
- name: CLEAN
run: |
rm -rf ${{ github.workspace }} && mkdir -p ${{ github.workspace }}
- name: Download BE Path Filter Artifact
uses: dawidd6/action-download-artifact@v6
with:
name: be-path-filter
path: be-path-filter
if_no_artifact_found: fail
run_id: ${{ github.run_id }}
- name: Download FE Path Filter Artifact
uses: dawidd6/action-download-artifact@v6
with:
name: fe-path-filter
path: fe-path-filter
if_no_artifact_found: fail
run_id: ${{ github.run_id }}
- name: Parsing BE path-filter file
id: parsing-be-path-filter
run: |
if [[ -e be-path-filter ]]; then
cd be-path-filter/; ls;
echo "src_filter=`cat src_filter.txt`" >> $GITHUB_OUTPUT
echo "test_filter.txt=`cat test_filter.txt`" >> $GITHUB_OUTPUT
echo "thirdparty_filter=`cat thirdparty_filter.txt`" >> $GITHUB_OUTPUT
fi
- name: Parsing FE path-filter file
id: parsing-fe-path-filter
run: |
if [[ -e fe-path-filter ]]; then
cd fe-path-filter/; ls;
echo "src_filter=`cat src_filter.txt`" >> $GITHUB_OUTPUT
echo "test_filter.txt=`cat test_filter.txt`" >> $GITHUB_OUTPUT
echo "java_filter=`cat java_filter.txt`" >> $GITHUB_OUTPUT
echo "extension_filter=`cat extension_filter.txt`" >> $GITHUB_OUTPUT
echo "pom_filter=`cat pom_filter.txt`" >> $GITHUB_OUTPUT
fi
- name: Check necessity
id: check-necessity
if: >
(needs.clang-tidy.result == 'success' && needs.fe-ut.result == 'success') ||
(steps.parsing-be-path-filter.outputs.src_filter != 'true' && steps.parsing-fe-path-filter.outputs.src_filter == 'true' && needs.fe-ut.result == 'success') ||
(steps.parsing-fe-path-filter.outputs.src_filter != 'true' && steps.parsing-be-path-filter.outputs.src_filter == 'true' && needs.clang-tidy.result == 'success') ||
(steps.parsing-be-path-filter.outputs.src_filter != 'true' && steps.parsing-fe-path-filter.outputs.src_filter != 'true' && needs.test-checker.outputs.output1 == 'true')
run: |
echo "BUILD_NECE=true" >> $GITHUB_OUTPUT
- name: BRANCH INFO
id: branch
run: |
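The build job's if: now only rules out hard upstream failures; the detailed condition moves into the Check necessity step, whose BUILD_NECE output is exposed for downstream jobs. A standalone bash rendering of that decision, with stand-in values for the parsed filter outputs and job results:

    be_src_filter=false; fe_src_filter=true          # stand-ins for steps.*.outputs
    clang_tidy_result=skipped; fe_ut_result=success  # stand-ins for needs.*.result
    test_checker=false
    if [[ $clang_tidy_result == success && $fe_ut_result == success ]] ||
       [[ $be_src_filter != true && $fe_src_filter == true && $fe_ut_result == success ]] ||
       [[ $fe_src_filter != true && $be_src_filter == true && $clang_tidy_result == success ]] ||
       [[ $be_src_filter != true && $fe_src_filter != true && $test_checker == true ]]; then
      echo "BUILD_NECE=true"                         # second clause matches here
    fi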
@@ -571,13 +669,13 @@ jobs:
- name: Rebuild Checker
id: rebuild-checker
env:
be_change: ${{ needs.be-checker.outputs.src_filter }}
fe_change: ${{ needs.fe-checker.outputs.src_filter }}
be_change: ${{ steps.parsing-be-path-filter.outputs.src_filter }}
fe_change: ${{ steps.parsing-fe-path-filter.outputs.src_filter }}
build_type: Release
run: |
echo "package=${package}" >> "$GITHUB_ENV"
if [[ "${be_change}" != "true" && "${fe_change}" != "true" ]]; then
oss_path=oss://${bucket_prefix}-ci-release/${BRANCH}/${build_type}/inspection/pr/StarRocks-
oss_path=oss://${bucket_prefix}-ci-release/${BRANCH}/${build_type}/inspection/pr/ubuntu/StarRocks-
package=$(ossutil64 --config-file ~/.ossutilconfig ls ${oss_path} | grep "tar.gz" | sort -n -r | head -n 1 | awk '{print $NF}')
echo "package=${package}" >> $GITHUB_ENV
fi
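Prebuilt inspection packages are now published under a distro-scoped prefix (.../inspection/pr/ubuntu/...), and the rebuild shortcut picks the newest tar from an ossutil listing. The selection pipeline can be tried locally by piping in fake listing lines (paths below are made up):

    printf '%s\n' \
      '2024-08-20 12:00:00 +0800 CST 1024 Standard oss://demo-ci-release/main/Release/inspection/pr/ubuntu/StarRocks-a.tar.gz' \
      '2024-08-21 12:00:00 +0800 CST 1024 Standard oss://demo-ci-release/main/Release/inspection/pr/ubuntu/StarRocks-b.tar.gz' |
      grep "tar.gz" | sort -n -r | head -n 1 | awk '{print $NF}'
    # -> the StarRocks-b.tar.gz path: the most recent listing line wins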
@@ -588,16 +686,16 @@ jobs:
timeout-minutes: 90
env:
package: ${{ env.package }}
pom_filter: ${{ needs.fe-checker.outputs.pom_filter }}
pom_filter: ${{ steps.parsing-fe-path-filter.outputs.pom_filter }}
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
if [[ "${{ needs.be-checker.outputs.thirdparty_filter }}" == 'true' ]]; then
export image_cache_id=${{ needs.thirdparty-info.outputs.centos7_image_cache_id }}
if [[ "${{ steps.parsing-be-path-filter.outputs.thirdparty_filter }}" == 'true' ]]; then
export image_cache_id=${IMAGE_CACHE_ID}
export image_tag=$BRANCH-$PR_NUMBER
fi
if [[ ${package} == "" ]]; then
./bin/elastic-build.sh --pr ${PR_NUMBER} --repository ${{ github.repository }} --with-gcov --with-trivy
./bin/elastic-build.sh --pr ${PR_NUMBER} --repository ${{ github.repository }} --linuxdistro ${LINUX_DISTRO} --with-gcov --with-trivy
echo "is_self_build=true" >> $GITHUB_OUTPUT
else
echo "Use latest tar: ${package}"
@@ -640,6 +738,7 @@ jobs:
name: DEPLOY SR
env:
PR_NUMBER: ${{ github.event.number }}
LINUX_DISTRO: ubuntu
outputs:
fe: ${{steps.deploy_sr.outputs.fe}}
be: ${{steps.deploy_sr.outputs.be}}
@@ -661,19 +760,13 @@ jobs:
echo "cloud=true" >> "$GITHUB_OUTPUT"
fi
- name: BRANCH INFO
id: branch
run: |
echo ${{github.base_ref}}
echo "branch=${{github.base_ref}}" >> $GITHUB_OUTPUT
- name: Apply for resources
id: apply_resource
env:
CLUSTER_NAME: ci-admit
run: |
cd ci-tool && source lib/init.sh
./bin/elastic-cluster.sh --template ${CLUSTER_NAME}
./bin/elastic-cluster.sh --template ${CLUSTER_NAME} --linuxdistro ${LINUX_DISTRO}
cp conf/starrocks_deploy.conf /var/local/env/${PR_NUMBER}-starrocks_deploy.conf
echo "deploy_conf_file=/var/local/env/${PR_NUMBER}-starrocks_deploy.conf" >> $GITHUB_OUTPUT
@@ -687,7 +780,7 @@ jobs:
if [[ "${{ needs.build.outputs.is_self_build }}" == 'true' ]]; then
bucket_prefix=`echo ${repo%/*} | tr '[:upper:]' '[:lower:]'`
tar_path="oss://${bucket_prefix}-ci-release/${{steps.branch.outputs.branch}}/Release/pr/${{needs.build.outputs.build_output_tar}}"
tar_path="oss://${bucket_prefix}-ci-release/${{github.base_ref}}/Release/pr/${LINUX_DISTRO}/${{needs.build.outputs.build_output_tar}}"
else
tar_path="${{ needs.build.outputs.build_output_tar }}"
fi
@@ -760,8 +853,9 @@ jobs:
run: |
branch=${{ steps.branch.outputs.branch }}
bucket_prefix=${{ steps.branch.outputs.bucket_prefix }}
ossutil64 --config-file ~/.ossutilconfig rm oss://${bucket_prefix}-ci-release/$branch/Release/pr/SQL-Tester-XML/${PR_NUMBER}/ -rf
ossutil64 --config-file ~/.ossutilconfig cp test/ oss://${bucket_prefix}-ci-release/$branch/Release/pr/SQL-Tester-XML/${PR_NUMBER}/ --include "*.xml" --recursive --force --tagging="type=ci"
xml_oss_path=oss://${bucket_prefix}-ci-release/$branch/Release/pr/SQL-Tester-XML/${PR_NUMBER}/
ossutil64 --config-file ~/.ossutilconfig rm ${xml_oss_path} -rf
ossutil64 --config-file ~/.ossutilconfig cp test/ ${xml_oss_path} --include "*.xml" --recursive --force --tagging="type=ci"
- name: Upload log
uses: actions/upload-artifact@v4
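Factoring the prefix into xml_oss_path keeps the delete and the upload pointed at exactly the same location; the same cleanup-then-upload refactor is applied to the Admit XMLs below. Standalone, with stand-in values and the commands echoed rather than executed (the bucket is internal):

    bucket_prefix=demo; branch=main; PR_NUMBER=48373   # stand-ins
    xml_oss_path="oss://${bucket_prefix}-ci-release/${branch}/Release/pr/SQL-Tester-XML/${PR_NUMBER}/"
    echo "ossutil64 rm ${xml_oss_path} -rf"
    echo "ossutil64 cp test/ ${xml_oss_path} --include '*.xml' --recursive --force"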
@@ -876,8 +970,9 @@ jobs:
run: |
branch=${{ steps.branch.outputs.branch }}
bucket_prefix=${{ steps.branch.outputs.bucket_prefix }}
ossutil64 --config-file ~/.ossutilconfig rm oss://${bucket_prefix}-ci-release/$branch/Release/pr/Admit-XML/${PR_NUMBER}/ -rf
ossutil64 --config-file ~/.ossutilconfig cp StarRocksTest/result oss://${bucket_prefix}-ci-release/$branch/Release/pr/Admit-XML/${PR_NUMBER}/ --include "*.xml" --recursive --force --tagging="type=ci"
xml_oss_path=oss://${bucket_prefix}-ci-release/$branch/Release/pr/Admit-XML/${PR_NUMBER}/
ossutil64 --config-file ~/.ossutilconfig rm ${xml_oss_path} -rf
ossutil64 --config-file ~/.ossutilconfig cp StarRocksTest/result ${xml_oss_path} --include "*.xml" --recursive --force --tagging="type=ci"
- name: Clean ENV
if: always()
@@ -893,6 +988,7 @@ jobs:
PR_NUMBER: ${{ github.event.number }}
BRANCH: ${{ github.base_ref }}
CONF_FILE: ${{ needs.deploy.outputs.deploy_conf_file }}
linuxdistro: ubuntu
steps:
- name: Upload info
run: |

View File

@@ -17,12 +17,12 @@ on:
description: 'COMMIT ID'
required: true
type: string
TAR_PATH:
description: 'TAR PATH(Release)'
CENTOS_TAR_PATH:
description: 'TAR PATH(Release & Centos)'
required: false
type: string
ASAN_TAR_PATH:
description: 'TAR PATH(ASAN)'
CENTOS_ASAN_TAR_PATH:
description: 'TAR PATH(ASAN & Centos)'
required: false
type: string
UBUNTU_TAR_PATH:
@@ -42,7 +42,7 @@ on:
type: string
default: 'true'
ALL_LINUX:
description: 'Centos7 && Release only?'
description: "ALL_LINUX? (⬜: Ubuntu && Release only)"
type: string
default: 'true'
RETAIN_ENV:
@@ -77,8 +77,8 @@ jobs:
outputs:
BRANCH: ${{ steps.param.outputs.BRANCH }}
PR_NUMBER: ${{ steps.param.outputs.PR_NUMBER }}
TAR_PATH: ${{ steps.param.outputs.TAR_PATH }}
ASAN_TAR_PATH: ${{ steps.param.outputs.ASAN_TAR_PATH }}
CENTOS_TAR_PATH: ${{ steps.param.outputs.CENTOS_TAR_PATH }}
CENTOS_ASAN_TAR_PATH: ${{ steps.param.outputs.CENTOS_ASAN_TAR_PATH }}
UBUNTU_TAR_PATH: ${{ steps.param.outputs.UBUNTU_TAR_PATH }}
UBUNTU_ASAN_TAR_PATH: ${{ steps.param.outputs.UBUNTU_ASAN_TAR_PATH }}
BE_UT_LINUX: ${{ steps.param.outputs.BE_UT_LINUX }}
@@ -89,7 +89,7 @@
- name: CRON PARAM
id: param
run: |
BE_UT_LINUX=centos7
BE_UT_LINUX=ubuntu
if [[ "${{ github.event_name }}" == "schedule" ]]; then
if [[ "${{ github.event.schedule }}" == "0 11 * * 1,3,5" ]]; then
branch=branch-3.3
@@ -107,7 +107,7 @@
else
branch=main
fi
[[ $((`date +%e` % 2)) -eq 1 ]] && BE_UT_LINUX=ubuntu
[[ $((`date +%e` % 2)) -eq 1 ]] && BE_UT_LINUX=centos7
base_sha=$(gh api /repos/${REPO}/branches/${branch} | jq -r .commit.sha)
echo "::notice::${branch}(${BE_UT_LINUX} ${base_sha})"
[[ "${base_sha}" == "null" ]] && (echo "::error::Get HEAD SHA error, please check." && exit -1);
@@ -118,8 +118,8 @@
else
echo "BRANCH=${{ inputs.BRANCH }}" >> $GITHUB_OUTPUT
echo "PR_NUMBER=${{ inputs.COMMIT_ID }}" >> $GITHUB_OUTPUT
echo "TAR_PATH=${{ inputs.TAR_PATH }}" >> $GITHUB_OUTPUT
echo "ASAN_TAR_PATH=${{ inputs.ASAN_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "CENTOS_TAR_PATH=${{ inputs.CENTOS_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "CENTOS_ASAN_TAR_PATH=${{ inputs.CENTOS_ASAN_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "UBUNTU_TAR_PATH=${{ inputs.UBUNTU_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "UBUNTU_ASAN_TAR_PATH=${{ inputs.UBUNTU_ASAN_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "RETAIN_ENV=${{ inputs.RETAIN_ENV }}" >> $GITHUB_OUTPUT
@@ -217,7 +217,7 @@
timeout-minutes: 60
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
./bin/elastic-ut.sh --repository ${{ github.repository }} --branch ${BRANCH} --pr ${PR_NUMBER} --module fe --build Release
./bin/elastic-ut.sh --repository ${{ github.repository }} --branch ${BRANCH} --pr ${PR_NUMBER} --module fe --build Release --linuxdistro ubuntu
- name: Clean ECI
if: always()
@@ -293,7 +293,7 @@
id: run_build
shell: bash
timeout-minutes: 90
if: (matrix.build_type == 'Release' && matrix.linux == 'centos7') || env.ALL_LINUX == 'true'
if: (matrix.build_type == 'Release' && matrix.linux == 'ubuntu') || env.ALL_LINUX == 'true'
run: |
rm -rf ./ci-tool && cp -rf /var/lib/ci-tool ./ci-tool && cd ci-tool && git pull && source lib/init.sh
./bin/elastic-build.sh --repository ${{ github.repository }} --branch ${BRANCH} --pr ${PR_NUMBER} \
@@ -335,8 +335,8 @@
env:
ALL_LINUX: ${{ needs.info.outputs.ALL_LINUX }}
outputs:
RELEASE_TAR_PATH: ${{ steps.set_output.outputs.CENTOS_RELEASE_TAR_PATH }}
ASAN_TAR_PATH: ${{ steps.set_output.outputs.CENTOS_ASAN_TAR_PATH }}
CENTOS_RELEASE_TAR_PATH: ${{ steps.set_output.outputs.CENTOS_RELEASE_TAR_PATH }}
CENTOS_ASAN_TAR_PATH: ${{ steps.set_output.outputs.CENTOS_ASAN_TAR_PATH }}
UBUNTU_RELEASE_TAR_PATH: ${{ steps.set_output.outputs.UBUNTU_RELEASE_TAR_PATH }}
UBUNTU_ASAN_TAR_PATH: ${{ steps.set_output.outputs.UBUNTU_ASAN_TAR_PATH }}
IS_SELF_BUILD: ${{ steps.set_output.outputs.IS_SELF_BUILD }}
@@ -392,8 +392,10 @@
if: always()
run: |
if [[ "${{steps.analyse_artifact.outcome}}" == "skipped" ]]; then
echo "CENTOS_RELEASE_TAR_PATH=${{ needs.info.outputs.TAR_PATH }}" >> $GITHUB_OUTPUT
echo "CENTOS_ASAN_TAR_PATH=${{ needs.info.outputs.ASAN_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "CENTOS_RELEASE_TAR_PATH=${{ needs.info.outputs.CENTOS_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "CENTOS_ASAN_TAR_PATH=${{ needs.info.outputs.CENTOS_ASAN_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "UBUNTU_RELEASE_TAR_PATH=${{ needs.info.outputs.UBUNTU_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "UBUNTU_ASAN_TAR_PATH=${{ needs.info.outputs.UBUNTU_ASAN_TAR_PATH }}" >> $GITHUB_OUTPUT
echo "IS_SELF_BUILD=false" >> $GITHUB_OUTPUT
else
echo "CENTOS_RELEASE_TAR_PATH=${{ steps.analyse_artifact.outputs.CENTOS_RELEASE_TAR_PATH }}" >> $GITHUB_OUTPUT
@@ -410,11 +412,11 @@
- info
- test_filter
secrets: inherit
if: always() && needs.test_filter.outputs.RELEASE_TAR_PATH != ''
if: always() && needs.test_filter.outputs.CENTOS_RELEASE_TAR_PATH != '' && needs.info.outputs.ALL_LINUX == 'true'
with:
BRANCH: ${{ needs.info.outputs.BRANCH }}
COMMIT_ID: ${{ needs.info.outputs.PR_NUMBER }}
TAR_PATH: ${{ needs.test_filter.outputs.RELEASE_TAR_PATH }}
TAR_PATH: ${{ needs.test_filter.outputs.CENTOS_RELEASE_TAR_PATH }}
BUILD_TYPE: Release
LINUX_DISTRO: centos7
IS_SELF_BUILD: ${{ needs.test_filter.outputs.IS_SELF_BUILD }}
@@ -427,11 +429,11 @@
- info
- test_filter
secrets: inherit
if: always() && needs.test_filter.outputs.ASAN_TAR_PATH != '' && needs.info.outputs.ALL_LINUX == 'true'
if: always() && needs.test_filter.outputs.CENTOS_ASAN_TAR_PATH != '' && needs.info.outputs.ALL_LINUX == 'true'
with:
BRANCH: ${{ needs.info.outputs.BRANCH }}
COMMIT_ID: ${{ needs.info.outputs.PR_NUMBER }}
TAR_PATH: ${{ needs.test_filter.outputs.ASAN_TAR_PATH }}
TAR_PATH: ${{ needs.test_filter.outputs.CENTOS_ASAN_TAR_PATH }}
BUILD_TYPE: ASAN
LINUX_DISTRO: centos7
IS_SELF_BUILD: ${{ needs.test_filter.outputs.IS_SELF_BUILD }}
@@ -444,7 +446,7 @@
- info
- test_filter
secrets: inherit
if: always() && needs.test_filter.outputs.UBUNTU_RELEASE_TAR_PATH != '' && needs.info.outputs.ALL_LINUX == 'true'
if: always() && needs.test_filter.outputs.UBUNTU_RELEASE_TAR_PATH != ''
with:
BRANCH: ${{ needs.info.outputs.BRANCH }}
COMMIT_ID: ${{ needs.info.outputs.PR_NUMBER }}
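Net effect of the gating changes in this file: the ubuntu release test now runs whenever its tar exists, while the centos7 release and ASAN tests additionally require ALL_LINUX to be requested. Sketched with stand-in values:

    ALL_LINUX=false
    CENTOS_TAR=""                                    # stand-ins
    UBUNTU_TAR="oss://demo/StarRocks-ubuntu.tar.gz"
    [[ -n "$CENTOS_TAR" && "$ALL_LINUX" == "true" ]] && echo "run centos7 release test"
    [[ -n "$UBUNTU_TAR" ]] && echo "run ubuntu release test"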

View File

@@ -91,21 +91,24 @@ class Filter(logging.Filter):
# replace secret infos
for secret_k, secret_v in SECRET_INFOS.items():
try:
record.msg = record.msg.replace(secret_v, '${%s}' % secret_k)
record.msg = record.msg.replace(secret_v, "${%s}" % secret_k)
except Exception:
record.msg = str(record.msg).replace(secret_v, '${%s}' % secret_k)
record.msg = str(record.msg).replace(secret_v, "${%s}" % secret_k)
if record.levelno < self.msg_level:
return False
return True
def self_print(msg):
def self_print(msg, need_print=True):
# replace secret infos
for secret_k, secret_v in SECRET_INFOS.items():
msg = msg.replace(secret_v, '${%s}' % secret_k)
msg = msg.replace(secret_v, "${%s}" % secret_k)
print(msg)
if need_print:
print(msg)
return msg
__LOG_FILE = os.path.join(LOG_DIR, "sql_test.log")
@@ -215,7 +218,7 @@ class StarrocksSQLApiLib(object):
cluster_status_dict = self.get_cluster_status()
if isinstance(cluster_status_dict, str):
if cluster_status_dict == 'abnormal':
if cluster_status_dict == "abnormal":
log.error("FE status is abnormal!")
return
@@ -244,10 +247,16 @@ class StarrocksSQLApiLib(object):
be_crash_case = self.case_info.name
title = f"[{self.run_info}] SQL-Tester crash"
run_link = os.environ.get('WORKFLOW_URL', '')
run_link = os.environ.get("WORKFLOW_URL", "")
body = (
"""```\nTest Case:\n %s\n```\n\n ```\nCrash Log: \n%s\n```\n\n```\nSR Version: %s\nBE: %s\nURL: %s\n\n```"""
% (be_crash_case, be_crash_log, cluster_status_dict["version"], cluster_status_dict["ip"][0], run_link)
"""```\nTest Case:\n %s\n```\n\n ```\nCrash Log: \n%s\n```\n\n```\nSR Version: %s\nBE: %s\nURL: %s\n\n```"""
% (
be_crash_case,
be_crash_log,
cluster_status_dict["version"],
cluster_status_dict["ip"][0],
run_link,
)
)
assignee = os.environ.get("ISSUE_AUTHOR")
repo = os.environ.get("GITHUB_REPOSITORY")
@@ -294,9 +303,7 @@ class StarrocksSQLApiLib(object):
return -1
time.sleep(10)
cmd = (
f". ~/.bash_profile; cd {self.cluster_path}/be; ulimit -c unlimited; export ASAN_OPTIONS=abort_on_error=1:disable_coredump=0:unmap_shadow_on_exit=1;sh bin/start_be.sh --daemon"
)
cmd = f". ~/.bash_profile; cd {self.cluster_path}/be; ulimit -c unlimited; export ASAN_OPTIONS=abort_on_error=1:disable_coredump=0:unmap_shadow_on_exit=1;sh bin/start_be.sh --daemon"
start_res = shell.expect.go_ex(ip, self.host_user, self.host_password, cmd, timeout=20, b_print_stdout=True)
if start_res["exitstatus"] != 0 or start_res["remote_exitstatus"] != 0:
log.error("Start be error, msg: %s" % start_res)
@@ -322,9 +329,7 @@ class StarrocksSQLApiLib(object):
def get_crash_log(self, ip):
log.warning("Get crash log from %s" % ip)
cmd = (
f'cd {self.cluster_path}/be/log/; grep -A10000 "*** Check failure stack trace: ***\|ERROR: AddressSanitizer:" be.out'
)
cmd = f'cd {self.cluster_path}/be/log/; grep -A10000 "*** Check failure stack trace: ***\|ERROR: AddressSanitizer:" be.out'
crash_log = shell.expect.go_ex(ip, self.host_user, self.host_password, cmd, timeout=20, b_print_stdout=False)
return crash_log["result"]
@@ -333,11 +338,11 @@ class StarrocksSQLApiLib(object):
wait until be was exited
"""
log.warning("Wait be exit...")
timeout = 60
while timeout > 0:
_timeout = 60
while _timeout > 0:
status_dict = self.get_cluster_status()
if status_dict == 'abnormal':
if status_dict == "abnormal":
# fe abnormal
return
@@ -346,7 +351,7 @@ class StarrocksSQLApiLib(object):
else:
time.sleep(5)
timeout -= 1
_timeout -= 1
return
@@ -423,7 +428,7 @@ class StarrocksSQLApiLib(object):
env_value = os.environ.get(env_key, "")
else:
# save secrets info
if 'aws' in env_key or 'oss_' in env_key:
if "aws" in env_key or "oss_" in env_key:
SECRET_INFOS[env_key] = env_value
self.__setattr__(env_key, env_value)
@@ -636,7 +641,7 @@ class StarrocksSQLApiLib(object):
for i in range(len(result)):
row = [str(item) for item in result[i]]
result[i] = '\t'.join(row)
result[i] = "\t".join(row)
return {"status": True, "result": "\n".join(result), "msg": "OK"}
@@ -790,7 +795,7 @@ class StarrocksSQLApiLib(object):
if regex.match(cmd):
# set variable
var = regex.match(cmd).group()
cmd = cmd[len(var):]
cmd = cmd[len(var) :]
var = var[:-1]
# replace variable dynamically, only replace right of '='
@@ -885,8 +890,10 @@ class StarrocksSQLApiLib(object):
return
try:
tools.assert_true(re.match(exp_std, act_std, flags=re.S),
"shell result str|re not match,\n[exp]: %s,\n [act]: %s" % (exp_std, act_std))
tools.assert_true(
re.match(exp_std, act_std, flags=re.S),
"shell result str|re not match,\n[exp]: %s,\n [act]: %s" % (exp_std, act_std),
)
except Exception as e:
log.warning("Try to treat res as regex, failed!\n:%s" % e)
@@ -907,7 +914,7 @@ class StarrocksSQLApiLib(object):
r"%s" % str(act),
exp[len(REGEX_FLAG) :],
"sql result not match regex:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---"
% (sql, exp[len(REGEX_FLAG) :], act),
% (self_print(sql, False), exp[len(REGEX_FLAG) :], act),
)
return
@@ -933,14 +940,14 @@ class StarrocksSQLApiLib(object):
expect_res,
act,
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---"
% (sql, expect_res, act),
% (self_print(sql, False), expect_res, act),
)
else:
tools.assert_count_equal(
expect_res,
act,
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---"
% (sql, expect_res, act),
% (self_print(sql, False), expect_res, act),
)
return
elif exp.startswith("{") and exp.endswith("}"):
@@ -950,7 +957,8 @@ class StarrocksSQLApiLib(object):
tools.assert_dict_equal(
json.loads(exp),
json.loads(act),
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---" % (sql, exp, act),
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---"
% (self_print(sql, False), exp, act),
)
return
except Exception as e:
@@ -976,11 +984,15 @@ class StarrocksSQLApiLib(object):
tools.assert_list_equal(
exp,
act,
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---" % (sql, exp, act),
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---"
% (self_print(sql, False), exp, act),
)
else:
tools.assert_count_equal(
exp, act, "sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---" % (sql, exp, act)
exp,
act,
"sql result not match:\n- [SQL]: %s\n- [exp]: %s\n- [act]: %s\n---"
% (self_print(sql, False), exp, act),
)
@staticmethod
@@ -1309,6 +1321,7 @@ class StarrocksSQLApiLib(object):
"""
wait async materialized view job finish and return status
"""
# show materialized views result
def is_all_finished1():
sql = "SHOW MATERIALIZED VIEWS WHERE database_name='{}' AND NAME='{}'".format(current_db, mv_name)
@@ -1317,37 +1330,42 @@ class StarrocksSQLApiLib(object):
if not result["status"]:
tools.assert_true(False, "show mv state error")
results = result["result"]
for res in results:
last_refresh_state = res[12]
for _res in results:
last_refresh_state = _res[12]
if last_refresh_state != "SUCCESS" and last_refresh_state != "MERGED":
return False
return True
def is_all_finished2():
sql = "select STATE from information_schema.task_runs a join information_schema.materialized_views b on a.task_name=b.task_name where b.table_name='{}' and a.`database`='{}'".format(mv_name, current_db)
print(sql)
sql = f"""select STATE from information_schema.task_runs a
join information_schema.materialized_views b
on a.task_name=b.task_name
where b.table_name='{mv_name}'
and a.`database`='{current_db}'
"""
self_print(sql)
result = self.execute_sql(sql, True)
if not result["status"]:
tools.assert_true(False, "show mv state error")
results = result["result"]
for res in results:
if res[0] != "SUCCESS" and res[0] != "MERGED":
for _res in results:
if _res[0] != "SUCCESS" and _res[0] != "MERGED":
return False
return True
# infomation_schema.task_runs result
# information_schema.task_runs result
def get_success_count(results):
cnt = 0
for res in results:
if res[0] == "SUCCESS" or res[0] == "MERGED":
for _res in results:
if _res[0] == "SUCCESS" or _res[0] == "MERGED":
cnt += 1
return cnt
MAX_LOOP_COUNT = 180
return cnt
max_loop_count = 180
is_all_ok = False
count = 0
if check_count is None:
while count < MAX_LOOP_COUNT:
while count < max_loop_count:
is_all_ok = is_all_finished1() and is_all_finished2()
if is_all_ok:
time.sleep(1)
@@ -1355,8 +1373,12 @@ class StarrocksSQLApiLib(object):
time.sleep(1)
count += 1
else:
show_sql = "select STATE from information_schema.task_runs a join information_schema.materialized_views b on a.task_name=b.task_name where b.table_name='{}' and a.`database`='{}'".format(mv_name, current_db)
while count < MAX_LOOP_COUNT:
show_sql = f"""select STATE from information_schema.task_runs a
join information_schema.materialized_views b
on a.task_name=b.task_name
where b.table_name='{mv_name}'
and a.`database`='{current_db}'"""
while count < max_loop_count:
print(show_sql)
res = self.execute_sql(show_sql, True)
if not res["status"]:
@@ -1370,15 +1392,17 @@ class StarrocksSQLApiLib(object):
break
time.sleep(1)
count += 1
tools.assert_equal(True, is_all_ok, "wait aysnc materialized view finish error")
tools.assert_equal(True, is_all_ok, "wait async materialized view finish error")
def wait_mv_refresh_count(self, db_name, mv_name, expect_count):
show_sql = """select count(*) from information_schema.materialized_views
join information_schema.task_runs using(task_name)
where table_schema='{}' and table_name='{}' and (state = 'SUCCESS' or state = 'MERGED')
""".format(db_name, mv_name)
join information_schema.task_runs using(task_name)
where table_schema='{}' and table_name='{}' and (state = 'SUCCESS' or state = 'MERGED')
""".format(
db_name, mv_name
)
print(show_sql)
cnt = 1
refresh_count = 0
while cnt < 60:
@@ -1391,23 +1415,27 @@ class StarrocksSQLApiLib(object):
print("current refresh count is {}, expect is {}".format(refresh_count, expect_count))
time.sleep(1)
cnt += 1
tools.assert_equal(expect_count, refresh_count, "wait too long for the refresh count")
tools.assert_equal(expect_count, refresh_count, "wait too long for the refresh count")
def wait_for_pipe_finish(self, db_name, pipe_name, check_count=60):
"""
wait pipe load finish
"""
state = ""
show_sql = "select state, load_status, last_error from information_schema.pipes where database_name='{}' and pipe_name='{}'".format(db_name, pipe_name)
show_sql = """select state, load_status, last_error
from information_schema.pipes
where database_name='{}' and pipe_name='{}'
""".format(
db_name, pipe_name
)
count = 0
print("waiting for pipe {}.{} finish".format(db_name, pipe_name))
while count < check_count:
res = self.execute_sql(show_sql, True)
print(res)
state = res["result"][0][0]
if state == 'RUNNING':
if state == "RUNNING":
print("pipe state is " + state)
time.sleep(1)
else:
@@ -1415,7 +1443,6 @@ class StarrocksSQLApiLib(object):
count += 1
tools.assert_equal("FINISHED", state, "didn't wait for the pipe to finish")
def check_hit_materialized_view_plan(self, res, mv_name):
"""
assert mv_name is hit in query
@@ -1460,13 +1487,13 @@ class StarrocksSQLApiLib(object):
# could be faster if making this loop parallel
for sql in sqls:
if sql.startswith(TRINO_FLAG):
sql = sql[len(TRINO_FLAG):]
sql = sql[len(TRINO_FLAG) :]
res = self.trino_execute_sql(sql)
elif sql.startswith(SPARK_FLAG):
sql = sql[len(SPARK_FLAG):]
sql = sql[len(SPARK_FLAG) :]
res = self.spark_execute_sql(sql)
elif sql.startswith(HIVE_FLAG):
sql = sql[len(HIVE_FLAG):]
sql = sql[len(HIVE_FLAG) :]
res = self.hive_execute_sql(sql)
else:
res = self.execute_sql(sql)
@@ -1613,7 +1640,9 @@ var columnId = new ColumnId("{col}");
var cid = new ColumnIdentifier(tid, columnId)
out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
'; """.format(db=db_name, tb=table_name, col=column_name)
'; """.format(
db=db_name, tb=table_name, col=column_name
)
res = self.execute_sql(sql, True)
print("scirpt output:" + str(res))
tools.assert_true(str(res["result"][0][0]).strip() == "true", "column still could collect dictionary")
@@ -1738,7 +1767,7 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
return res
def prepare_data(self, data_name, db):
""" load data """
"""load data"""
tools.assert_in(data_name, ["ssb", "tpch", "tpcds"], "Unsupported data!")
# create tables
@@ -1882,7 +1911,9 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
def wait_compaction_finish(self, table_name: str, expected_num_segments: int):
timeout = 300
scan_table_sql = f"SELECT /*+SET_VAR(enable_profile=true,enable_async_profile=false)*/ COUNT(1) FROM {table_name}"
scan_table_sql = (
f"SELECT /*+SET_VAR(enable_profile=true,enable_async_profile=false)*/ COUNT(1) FROM {table_name}"
)
fetch_segments_sql = r"""
with profile as (
select unnest as line from (values(1))t(v) join unnest(split(get_query_profile(last_query_id()), "\n"))
@@ -1918,16 +1949,17 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
backends = []
for row in res["result"]:
backends.append({
"host": row[1],
"port": row[4],
})
backends.append(
{
"host": row[1],
"port": row[4],
}
)
return backends
def update_be_config(self, key, value):
"""Update the config to all the backends.
"""
"""Update the config to all the backends."""
backends = self._get_backend_http_endpoints()
for backend in backends:
exec_url = f"http://{backend['host']}:{backend['port']}/api/update_config?{key}={value}"
@ -1935,8 +1967,9 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
res = self.post_http_request(exec_url)
res_json = json.loads(res)
tools.assert_dict_contains_subset({"status": "OK"}, res_json,
f"failed to update be config [response={res}] [url={exec_url}]")
tools.assert_dict_contains_subset(
{"status": "OK"}, res_json, f"failed to update be config [response={res}] [url={exec_url}]"
)
def assert_table_cardinality(self, sql, rows):
"""
@@ -1978,7 +2011,8 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
backend_id = res["result"][0][2]
res = self.execute_sql(
"ADMIN SET REPLICA STATUS PROPERTIES('tablet_id' = '%s', 'backend_id' = '%s', 'status' = 'bad')" % (tablet_id, backend_id),
"ADMIN SET REPLICA STATUS PROPERTIES('tablet_id' = '%s', 'backend_id' = '%s', 'status' = 'bad')"
% (tablet_id, backend_id),
True,
)
@@ -2002,7 +2036,10 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
sql = "explain %s" % query
res = self.execute_sql(sql, True)
for expect in expects:
tools.assert_true(str(res["result"]).find(expect) > 0, "assert expect {} is not found in plan {}".format(expect, res['result']))
tools.assert_true(
str(res["result"]).find(expect) > 0,
"assert expect {} is not found in plan {}".format(expect, res["result"]),
)
def assert_explain_not_contains(self, query, *expects):
"""
@@ -2038,15 +2075,14 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
sql = "trace values %s" % query
res = self.execute_sql(sql, True)
for expect in expects:
tools.assert_true(str(res["result"]).find(expect) > 0, "assert expect %s is not found in plan, error msg is %s" % (expect, str(res["result"])))
tools.assert_true(
str(res["result"]).find(expect) > 0,
"assert expect %s is not found in plan, error msg is %s" % (expect, str(res["result"])),
)
def assert_prepare_execute(self, db, query, params=()):
conn = mysql.connector.connect(
host=self.mysql_host,
user=self.mysql_user,
password="",
port=self.mysql_port,
database=db
host=self.mysql_host, user=self.mysql_user, password="", port=self.mysql_port, database=db
)
cursor = conn.cursor(prepared=True)
@@ -2070,21 +2106,24 @@ out.append("${{dictMgr.NO_DICT_STRING_COLUMNS.contains(cid)}}")
sql = "trace times %s" % query
res = self.execute_sql(sql, True)
for expect in expects:
tools.assert_true(str(res["result"]).find(expect) > 0, "assert expect %s is not found in plan, error msg is %s" % (expect, str(res["result"])))
tools.assert_true(
str(res["result"]).find(expect) > 0,
"assert expect %s is not found in plan, error msg is %s" % (expect, str(res["result"])),
)
def assert_clear_stale_stats(self, query, expect_num):
timeout = 300
num = 0;
num = 0
while timeout > 0:
res = self.execute_sql(query)
num = res["result"]
if int(num) < expect_num:
break;
break
time.sleep(10)
timeout -= 10
else:
tools.assert_true(False, "clear stale column stats timeout. The number of stale column stats is %s" % num)
def assert_table_partitions_num(self, table_name, expect_num):
res = self.execute_sql("SHOW PARTITIONS FROM %s" % table_name, True)
tools.assert_true(res["status"], "show schema change task error")

View File

@@ -126,7 +126,7 @@ class TestSQLCases(sr_sql_lib.StarrocksSQLApiLib):
if record_mode:
tools.assert_true(res, "Save %s.%s result error" % (self.case_info.file, self.case_info.name))
log.info("[TeadDown end]: %s" % self.case_info.name)
log.info(f"Execution complete [{self.case_info.name}]!")
# -------------------------------------------
# [CASE]