[Refactor] Unify the unit tests related names for BE and FE (#15667)

There are some inconsistencies between the unit test scripts of BE and FE.
This pull request unifies the two scripts:
1. rename run-ut.sh to run-be-ut.sh
2. use parameter --test to run specified test
   `./run-fe-ut.sh --run [TEST_NAME]`
   `./run-fe-ut.sh --gtest_filter [TEST_NAME_WILDCARD]`
3. Add a new parameter --dry-run to only compile and build the unit tests
   `./run-fe-ut.sh --dry-run`
   `./run-be-ut.sh --dry-run`
This commit is contained in:
lichaoyong 2023-01-09 23:12:33 -08:00 committed by GitHub
parent 15c193918e
commit 3fb0aa2e96
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 80 additions and 73 deletions

View File

@ -27,17 +27,17 @@ and, only build FE by
Unit tests of BE and FE are separated. In general, you can run BE test by
```
./run-ut.sh --run
./run-be-ut.sh
```
run FE test by
```
./run-fe-ut.sh --run
./run-fe-ut.sh
```
## How to run BE UT in command line
Now, BE UT needs some dependency to run, and `./run-ut.sh` helps it. But it is not flexible enough. When you want to run UT in the command-line, you can execute
Now, BE UT needs some dependency to run, and `./run-be-ut.sh` helps it. But it is not flexible enough. When you want to run UT in the command-line, you can execute
```
UDF_RUNTIME_DIR=./ STARROCKS_HOME=./ LD_LIBRARY_PATH=/usr/lib/jvm/java-18-openjdk-amd64/lib/server ./be/ut_build_ASAN/test/starrocks_test

View File

@ -32,41 +32,42 @@ usage() {
echo "
Usage: $0 <options>
Optional options:
--clean clean and build ut
--run build and run ut
--gtest_filter specify test cases
--test [TEST_NAME] run specific test
--dry-run dry-run unit tests
--clean clean old unit tests before run
--with-gcov enable to build with gcov
--with-aws enable to test aws
--with-bench enable to build with benchmark
--with-gcov enable to build with gcov
--module module to run uts
--use-staros enable to build with staros
-j build parallel
Eg.
$0 build ut
$0 --run build and run ut
$0 --run --gtest_filter scan* build and run ut of specified cases
$0 --clean clean and build ut
$0 --clean --run clean, build and run ut
$0 --clean --run --with-gcov clean, build and run ut with gcov
$0 run all unit tests
$0 --test CompactionUtilsTest run compaction test
$0 --dry-run dry-run unit tests
$0 --clean clean old unit tests before run
$0 --help display usage
"
exit 1
}
# -l run and -l gtest_filter only used for compatibility
OPTS=$(getopt \
-n $0 \
-o '' \
-l 'run' \
-l 'test:' \
-l 'dry-run' \
-l 'clean' \
-l "gtest_filter:" \
-l 'with-gcov' \
-l 'module:' \
-l 'with-aws' \
-l 'with-bench' \
-l 'use-staros' \
-l 'with-gcov' \
-o 'j:' \
-l 'help' \
-l 'run' \
-l 'gtest_filter:' \
-- "$@")
if [ $? != 0 ] ; then
@ -76,8 +77,8 @@ fi
eval set -- "$OPTS"
CLEAN=0
RUN=0
TEST_FILTER=*
DRY_RUN=0
TEST_NAME=*
TEST_MODULE=".*"
HELP=0
WITH_AWS=OFF
@ -87,8 +88,10 @@ WITH_GCOV=OFF
while true; do
case "$1" in
--clean) CLEAN=1 ; shift ;;
--run) RUN=1 ; shift ;;
--gtest_filter) TEST_FILTER=$2 ; shift 2;;
--dry-run) DRY_RUN=1 ; shift ;;
--run) shift ;; # Option only for compatibility
--test) TEST_NAME=$2 ; shift 2;;
--gtest_filter) TEST_NAME=$2 ; shift 2;; # Option only for compatibility
--module) TEST_MODULE=$2; shift 2;;
--help) HELP=1 ; shift ;;
--with-aws) WITH_AWS=ON; shift ;;
@ -157,12 +160,11 @@ else
-DWITH_BLOCK_CACHE=${WITH_BLOCK_CACHE} \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON ../
fi
time ${BUILD_SYSTEM} -j${PARALLEL}
${BUILD_SYSTEM} -j${PARALLEL}
if [ ${RUN} -ne 1 ]; then
echo "Finished"
exit 0
fi
echo "*********************************"
echo " Starting to Run BE Unit Tests "
echo "*********************************"
cd ${STARROCKS_HOME}
export STARROCKS_TEST_BINARY_DIR=${CMAKE_BUILD_DIR}
@ -178,15 +180,6 @@ mkdir -p $LOG_DIR
mkdir -p ${UDF_RUNTIME_DIR}
rm -f ${UDF_RUNTIME_DIR}/*
if [ ${RUN} -ne 1 ]; then
echo "Finished"
exit 0
fi
echo "******************************"
echo " Running StarRocks BE Unittest "
echo "******************************"
. ${STARROCKS_HOME}/bin/common.sh
# ====================== configure JAVA/JVM ====================
@ -218,18 +211,16 @@ if [ "${WITH_BLOCK_CACHE}" == "ON" ]; then
export LD_LIBRARY_PATH=$CACHELIB_DIR/lib:$CACHELIB_DIR/lib64:$CACHELIB_DIR/deps/lib:$CACHELIB_DIR/deps/lib64:$LD_LIBRARY_PATH
fi
echo "GTEST_OPTIONS:${GTEST_OPTIONS}"
# HADOOP_CLASSPATH defined in $STARROCKS_HOME/conf/hadoop_env.sh
# put $STARROCKS_HOME/conf ahead of $HADOOP_CLASSPATH so that custom config can replace the config in $HADOOP_CLASSPATH
export CLASSPATH=$STARROCKS_HOME/conf:$HADOOP_CLASSPATH:$CLASSPATH
# ===========================================================
export STARROCKS_TEST_BINARY_DIR=${STARROCKS_TEST_BINARY_DIR}/test/
export STARROCKS_TEST_BINARY_DIR=${STARROCKS_TEST_BINARY_DIR}/test
if [ $WITH_AWS = "OFF" ]; then
TEST_FILTER="$TEST_FILTER:-*S3*"
TEST_NAME="$TEST_NAME*:-*S3*"
fi
# prepare util test_data
@ -246,20 +237,25 @@ test_files=`find ${STARROCKS_TEST_BINARY_DIR} -type f -perm -111 -name "*test" \
# run cases in starrocks_test in parallel if has gtest-parallel script.
# reference: https://github.com/google/gtest-parallel
if [[ $TEST_MODULE == '.*' || $TEST_MODULE == 'starrocks_test' ]]; then
echo "Run test file: starrocks_test"
if [ -x ${GTEST_PARALLEL} ]; then
${GTEST_PARALLEL} ${STARROCKS_TEST_BINARY_DIR}/starrocks_test --gtest_catch_exceptions=0 --gtest_filter=${TEST_FILTER} --serialize_test_cases ${GTEST_PARALLEL_OPTIONS}
else
${STARROCKS_TEST_BINARY_DIR}/starrocks_test $GTEST_OPTIONS --gtest_filter=${TEST_FILTER}
echo "Run test: ${STARROCKS_TEST_BINARY_DIR}/starrocks_test"
if [ ${DRY_RUN} -eq 0 ]; then
if [ -x ${GTEST_PARALLEL} ]; then
${GTEST_PARALLEL} ${STARROCKS_TEST_BINARY_DIR}/starrocks_test \
--gtest_catch_exceptions=0 --gtest_filter=${TEST_NAME} \
--serialize_test_cases ${GTEST_PARALLEL_OPTIONS}
else
${STARROCKS_TEST_BINARY_DIR}/starrocks_test $GTEST_OPTIONS --gtest_filter=${TEST_NAME}
fi
fi
fi
for test in ${test_files[@]}
do
echo "Run test file: $test"
file_name=${test##*/}
if [ -z $RUN_FILE ] || [ $file_name == $RUN_FILE ]; then
echo "=== Run $file_name ==="
$test $GTEST_OPTIONS --gtest_filter=${TEST_FILTER}
echo "Run test: $test"
if [ ${DRY_RUN} -eq 0 ]; then
file_name=${test##*/}
if [ -z $RUN_FILE ] || [ $file_name == $RUN_FILE ]; then
$test $GTEST_OPTIONS --gtest_filter=${TEST_NAME}
fi
fi
done

View File

@ -30,25 +30,31 @@ usage() {
echo "
Usage: $0 <options>
Optional options:
--clean clean and build ut
--run build and run ut
--test [TEST_NAME] run specific test
--dry-run dry-run unit tests
--coverage run coverage statistic tasks
--dumpcase [PATH] run dump case and save to path
Eg.
$0 build and run ut
$0 --coverage build and run coverage statistic
$0 --run xxx build and run the specified class
$0 --dumpcase path run dump case
$0 run all unit tests
$0 --test com.starrocks.utframe.Demo run demo test
$0 --dry-run dry-run unit tests
$0 --coverage run coverage statistic tasks
$0 --dumpcase /home/disk1/ run dump case and save to path
"
exit 1
}
# -l run only used for compatibility
OPTS=$(getopt \
-n $0 \
-o '' \
-l 'test:' \
-l 'dry-run' \
-l 'coverage' \
-l 'run' \
-l 'dumpcase' \
-l 'help' \
-l 'run' \
-- "$@")
if [ $? != 0 ] ; then
@ -58,17 +64,21 @@ fi
eval set -- "$OPTS"
HELP=0
RUN=0
DRY_RUN=0
RUN_SPECIFIED_TEST=0
TEST_NAME=*
COVERAGE=0
DUMPCASE=0
while true; do
case "$1" in
--coverage) COVERAGE=1 ; shift ;;
--run) RUN=1 ; shift ;;
--test) RUN_SPECIFIED_TEST=1; TEST_NAME=$2; shift 2;;
--run) shift ;; # only used for compatibility
--dumpcase) DUMPCASE=1; shift ;;
--dry-run) DRY_RUN=1 ; shift ;;
--help) HELP=1 ; shift ;;
--) shift ; break ;;
*) ehco "Internal error" ; exit 1 ;;
*) echo "Internal error" ; exit 1 ;;
esac
done
@ -77,11 +87,9 @@ if [ ${HELP} -eq 1 ]; then
exit 0
fi
echo "Build Frontend UT"
echo "******************************"
echo " Runing StarRocksFE Unittest "
echo "******************************"
echo "*********************************"
echo " Starting to Run FE Unit Tests "
echo "*********************************"
cd ${STARROCKS_HOME}/fe/
mkdir -p build/compile
@ -103,20 +111,23 @@ fi
mkdir ut_ports
if [ ${COVERAGE} -eq 1 ]; then
echo "Run coverage statistic"
echo "Run coverage statistic tasks"
ant cover-test
elif [ ${DUMPCASE} -eq 1 ]; then
${MVN_CMD} test -DfailIfNoTests=false -DtrimStackTrace=false -D test=com.starrocks.sql.dump.QueryDumpRegressionTest -D dumpJsonConfig=$1
else
if [ ${RUN} -eq 1 ]; then
echo "Run the specified class: $1"
# eg:
# sh run-fe-ut.sh --run com.starrocks.utframe.Demo
# sh run-fe-ut.sh --run com.starrocks.utframe.Demo#testCreateDbAndTable+test2
# set trimStackTrace to false to show full stack when debugging specified class or case
${MVN_CMD} test -DfailIfNoTests=false -DtrimStackTrace=false -D test=$1
if [ ${RUN_SPECIFIED_TEST} -eq 1 ]; then
echo "Run test: $TEST_NAME"
if [ $DRY_RUN -eq 0 ]; then
# ./run-fe-ut.sh --test com.starrocks.utframe.Demo
# ./run-fe-ut.sh --test com.starrocks.utframe.Demo#testCreateDbAndTable+test2
# set trimStackTrace to false to show full stack when debugging specified class or case
${MVN_CMD} test -DfailIfNoTests=false -DtrimStackTrace=false -D test=$TEST_NAME
fi
else
echo "Run Frontend UT"
${MVN_CMD} test -DfailIfNoTests=false -DtrimStackTrace=false
echo "Run All Frontend Unittests"
if [ $DRY_RUN -eq 0 ]; then
${MVN_CMD} test -DfailIfNoTests=false -DtrimStackTrace=false
fi
fi
fi