[Enhancement] Remove junit4 code and dependency (#60477)

Signed-off-by: Binglin Chang <decstery@gmail.com>
This commit is contained in:
Binglin Chang 2025-07-03 17:21:02 +08:00 committed by GitHub
parent 239fb1e348
commit 086c245387
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 455 additions and 418 deletions

View File

@ -0,0 +1,3 @@
# https://avd.aquasec.com/nvd/cve-2025-52999
# ignore it for now, tracking it in https://github.com/StarRocks/starrocks/issues/60549
CVE-2025-52999

View File

@ -239,26 +239,10 @@ under the License.
<artifactId>json</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>5.8.2</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.junit.vintage/junit-vintage-engine -->
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>5.8.2</version>
<scope>test</scope>
</dependency>
@ -587,14 +571,13 @@ under the License.
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>
<version>1.23</version>
<version>1.37</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<version>1.23</version>
<version>1.37</version>
<scope>test</scope>
</dependency>
@ -803,13 +786,6 @@ under the License.
<artifactId>encoder</artifactId>
</dependency>
<dependency>
<groupId>com.carrotsearch</groupId>
<artifactId>junit-benchmarks</artifactId>
<version>0.7.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>

View File

@ -14,30 +14,34 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.google.common.collect.ImmutableList;
import com.starrocks.common.Pair;
import com.starrocks.sql.common.QueryDebugOptions;
import com.starrocks.sql.optimizer.rule.transformation.materialization.MVTestBase;
import com.starrocks.sql.plan.PlanTestBase;
import org.junit.Rule;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Warmup;
import java.util.List;
import java.util.concurrent.TimeUnit;
@Disabled
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 3)
@Measurement(iterations = 10)
@Fork(1)
public class MVPartitionCompensateOptBench extends MVTestBase {
private static final int MV_NUMS = 100;
private static final int BENCHMARK_RUNS = 10;
@Rule
public TestRule mvPartitionCompensateBench = new BenchmarkRule();
@BeforeAll
public static void setup() throws Exception {
@ -121,67 +125,67 @@ public class MVPartitionCompensateOptBench extends MVTestBase {
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf0() {
testMVPartitionCompensatePerf(0);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf1() {
testMVPartitionCompensatePerf(1);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf2() {
testMVPartitionCompensatePerf(2);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf3() {
testMVPartitionCompensatePerf(3);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf4() {
testMVPartitionCompensatePerf(4);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf5() {
testMVPartitionCompensatePerf(5);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf6() {
testMVPartitionCompensatePerf(6);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf7() {
testMVPartitionCompensatePerf(7);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf8() {
testMVPartitionCompensatePerf(8);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf9() {
testMVPartitionCompensatePerf(9);
}
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = BENCHMARK_RUNS)
@Disabled("This benchmark should not be run as a test")
public void testMVPartitionCompensatePerf10() {
testMVPartitionCompensatePerf(10);
}

View File

@ -14,27 +14,28 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.google.common.collect.Lists;
import com.starrocks.common.FeConstants;
import com.starrocks.planner.MaterializedViewTestBase;
import com.starrocks.sql.plan.PlanTestBase;
import org.junit.Rule;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Warmup;
@Disabled
@BenchmarkMode(Mode.AverageTime)
@Fork(1)
@Warmup(iterations = 1)
@Measurement(iterations = 10)
public class MvPreProcessorWithSSBBench extends MaterializedViewTestBase {
private static final int MV_NUMS = 1000;
private static final int BENCHMARK_RUNS = 10;
@Rule
public TestRule mvPartitionCompensateBench = new BenchmarkRule();
@BeforeAll
public static void beforeClass() throws Exception {
@ -132,7 +133,7 @@ public class MvPreProcessorWithSSBBench extends MaterializedViewTestBase {
}
@Test
@BenchmarkOptions(warmupRounds = 1, benchmarkRounds = BENCHMARK_RUNS)
@Disabled
// MvPreProcessorWithSSBBench.testPartitionPredicate: [measured 10 out of 11 rounds, threads: 1 (sequential)]
// round: 0.32 [+- 0.08], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 4, GC.time: 0.05,
// time.total: 4.40, time.warmup: 1.21, time.bench: 3.20
@ -148,7 +149,7 @@ public class MvPreProcessorWithSSBBench extends MaterializedViewTestBase {
}
@Test
@BenchmarkOptions(warmupRounds = 1, benchmarkRounds = BENCHMARK_RUNS)
@Disabled
// MvPreProcessorWithSSBBench.testPartitionPredicate2: [measured 10 out of 11 rounds, threads: 1 (sequential)]
// round: 2.93 [+- 0.10], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 51, GC.time: 1.21,
// time.total: 33.49, time.warmup: 4.16, time.bench: 29.33

View File

@ -14,8 +14,6 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.google.api.client.util.Lists;
import com.starrocks.catalog.Database;
import com.starrocks.catalog.MaterializedView;
@ -26,13 +24,12 @@ import com.starrocks.schema.MTable;
import com.starrocks.server.GlobalStateMgr;
import com.starrocks.sql.optimizer.CachingMvPlanContextBuilder;
import com.starrocks.sql.optimizer.rule.transformation.materialization.MVTestBase;
import org.junit.Rule;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
import org.openjdk.jmh.annotations.BenchmarkMode;
import java.util.List;
import java.util.Random;
@ -45,11 +42,9 @@ import java.util.Random;
* refresh mvs with concurrency to test lock and performance
*/
@Disabled
@BenchmarkMode(org.openjdk.jmh.annotations.Mode.AverageTime)
public class MvRefreshConcurrencyTest extends MVTestBase {
@Rule
public TestRule benchRun = new BenchmarkRule();
private static String buildDbName(int idx) {
return "mock_db_" + idx;
}
@ -151,25 +146,25 @@ public class MvRefreshConcurrencyTest extends MVTestBase {
}
@Test
@BenchmarkOptions(warmupRounds = 0, benchmarkRounds = 1)
@Disabled
public void testWithTables2_c4() {
testRefreshWithConcurrency(4, 2);
}
@Test
@BenchmarkOptions(warmupRounds = 0, benchmarkRounds = 1)
@Disabled
public void testWithTables10_c4() {
testRefreshWithConcurrency(10, 4);
}
@Test
@BenchmarkOptions(warmupRounds = 0, benchmarkRounds = 1)
@Disabled
public void testWithTables20_c4() {
testRefreshWithConcurrency(20, 10);
}
@Test
@BenchmarkOptions(warmupRounds = 0, benchmarkRounds = 1)
@Disabled
public void testWithTables50_c16() {
Config.task_runs_concurrency = 16;
testRefreshWithConcurrency(50, 50);
@ -177,7 +172,7 @@ public class MvRefreshConcurrencyTest extends MVTestBase {
}
@Test
@BenchmarkOptions(warmupRounds = 0, benchmarkRounds = 1)
@Disabled
public void testWithTables50_c50() {
Config.task_runs_concurrency = 50;
testRefreshWithConcurrency(50, 50);

View File

@ -14,26 +14,33 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.starrocks.common.Config;
import com.starrocks.qe.SessionVariable;
import com.starrocks.sql.optimizer.CachingMvPlanContextBuilder;
import com.starrocks.sql.optimizer.rule.transformation.materialization.MVTestBase;
import org.junit.Rule;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Warmup;
import java.util.concurrent.TimeUnit;
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 3)
@Measurement(iterations = 20)
@Fork(1)
public class MvRewritePerfTest extends MVTestBase {
private static final int MV_NUM = 40;
@Rule
public TestRule benchRun = new BenchmarkRule();
@BeforeAll
public static void beforeClass() throws Exception {
MVTestBase.beforeClass();
@ -86,7 +93,7 @@ public class MvRewritePerfTest extends MVTestBase {
// round: 0.01 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.57, time.warmup: 0.34, time.bench: 0.23
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
public void testManyCandidateMv_Join_WithRewriteLimit() throws Exception {
final String sql = " select t0.v1, t0.v2, t0.v3, t1.k1 from t0 left join t1 on t0.v1 = t1.v1";
starRocksAssert.getCtx().getSessionVariable().setCboMaterializedViewRewriteRuleOutputLimit(3);
@ -96,7 +103,7 @@ public class MvRewritePerfTest extends MVTestBase {
// round: 0.01 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.23, time.warmup: 0.03, time.bench: 0.20
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
public void testManyCandidateMv_Join_WithoutRewriteLimit() throws Exception {
final String sql = " select t0.v1, t0.v2, t0.v3, t1.k1 from t0 left join t1 on t0.v1 = t1.v1";
starRocksAssert.getCtx().getSessionVariable().setCboMaterializedViewRewriteRuleOutputLimit(1000);
@ -106,7 +113,7 @@ public class MvRewritePerfTest extends MVTestBase {
// round: 0.01 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.13, time.warmup: 0.02, time.bench: 0.12
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
public void testManyCandidateMV_WithCandidateLimit() throws Exception {
final String sql = " select t0.v1, t0.v2, t0.v3, t1.k1 from t0 left join t1 on t0.v1 = t1.v1";
starRocksAssert.getCtx().getSessionVariable().setCboMaterializedViewRewriteCandidateLimit(3);
@ -117,7 +124,7 @@ public class MvRewritePerfTest extends MVTestBase {
// round: 0.02 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.40, time.warmup: 0.06, time.bench: 0.34
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
public void testManyCandidateMV_WithoutCandidateLimit() throws Exception {
final String sql = " select t0.v1, t0.v2, t0.v3, t1.k1 from t0 left join t1 on t0.v1 = t1.v1";
starRocksAssert.getCtx().getSessionVariable().setCboMaterializedViewRewriteCandidateLimit(0);
@ -128,7 +135,7 @@ public class MvRewritePerfTest extends MVTestBase {
// round: 0.02 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.45, time.warmup: 0.07, time.bench: 0.38
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
public void testManyCandidateMv_Agg_WithRewriteLimit() throws Exception {
final String sql =
" select t0.v1, sum(t1.v1), count(t1.v2) from t0 left join t1 on t0.v1 = t1.v1 group by t0.v1";
@ -139,7 +146,7 @@ public class MvRewritePerfTest extends MVTestBase {
// round: 0.02 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.45, time.warmup: 0.06, time.bench: 0.38
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
public void testManyCandidateMv_Agg_WithoutRewriteLimit() throws Exception {
final String sql =
" select t0.v1, sum(t1.v1), count(t1.v2) from t0 left join t1 on t0.v1 = t1.v1 group by t0.v1";

View File

@ -14,24 +14,16 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.starrocks.sql.optimizer.dump.QueryDumpInfo;
import com.starrocks.sql.plan.ReplayFromDumpTestBase;
import com.starrocks.utframe.UtFrameUtils;
import org.junit.Rule;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
/**
* Benchmark planner performance for specific query dump
*/
public class QueryDumpPlannerBenchTest extends ReplayFromDumpTestBase {
@Rule
public TestRule benchRun = new BenchmarkRule();
private static String sql;
// NOTE: ignore the test in CI
@ -56,7 +48,7 @@ public class QueryDumpPlannerBenchTest extends ReplayFromDumpTestBase {
*/
@Test
@Disabled
@BenchmarkOptions(concurrency = 1, warmupRounds = 10, benchmarkRounds = 1000)
//@BenchmarkOptions(concurrency = 1, warmupRounds = 10, benchmarkRounds = 1000)
public void benchDump() throws Exception {
connectContext.setThreadLocalInfo();
UtFrameUtils.replaySql(connectContext, sql);

View File

@ -14,8 +14,6 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.google.common.collect.Maps;
import com.starrocks.common.Config;
import com.starrocks.scheduler.Constants;
@ -27,22 +25,29 @@ import com.starrocks.scheduler.TaskRunBuilder;
import com.starrocks.scheduler.TaskRunScheduler;
import com.starrocks.server.GlobalStateMgr;
import com.starrocks.sql.optimizer.rule.transformation.materialization.MVTestBase;
import org.junit.Rule;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
@State(Scope.Benchmark)
@BenchmarkMode(Mode.AverageTime)
@Fork(1)
@Warmup(iterations = 1)
@Measurement(iterations = 1)
@Disabled
public class TaskSchedulerBench extends MVTestBase {
// private static final int TASK_NUM = Config.task_runs_queue_length;
private static final int TASK_NUM = 10;
@Rule
public TestRule benchRun = new BenchmarkRule();
@BeforeAll
public static void beforeClass() throws Exception {
MVTestBase.beforeClass();
@ -71,8 +76,7 @@ public class TaskSchedulerBench extends MVTestBase {
return taskRun;
}
@Test
@BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 1)
@Benchmark
public void testTaskSchedulerWithDifferentTaskIds() {
TaskManager tm = GlobalStateMgr.getCurrentState().getTaskManager();
TaskRunScheduler taskRunScheduler = tm.getTaskRunScheduler();
@ -91,8 +95,7 @@ public class TaskSchedulerBench extends MVTestBase {
}
}
@Test
@BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 1)
@Benchmark
public void testTaskSchedulerWithSameTaskIdsAndMergeable() {
TaskManager tm = GlobalStateMgr.getCurrentState().getTaskManager();
TaskRunScheduler taskRunScheduler = tm.getTaskRunScheduler();
@ -111,8 +114,7 @@ public class TaskSchedulerBench extends MVTestBase {
}
}
@Test
@BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 1)
@Benchmark
public void testTaskSchedulerWithSameTaskIdsAndNoMergeable() {
TaskManager tm = GlobalStateMgr.getCurrentState().getTaskManager();
TaskRunScheduler taskRunScheduler = tm.getTaskRunScheduler();

View File

@ -14,25 +14,21 @@
package com.starrocks.benchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.BenchmarkRule;
import com.starrocks.common.Config;
import com.starrocks.sql.optimizer.CachingMvPlanContextBuilder;
import com.starrocks.sql.optimizer.rule.transformation.materialization.MVTestBase;
import org.junit.Rule;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.rules.TestRule;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Warmup;
public class ViewBasedMvRewritePerfTest extends MVTestBase {
private static final int MV_NUM = 4;
@Rule
public TestRule benchRun = new BenchmarkRule();
@BeforeAll
public static void beforeClass() throws Exception {
MVTestBase.beforeClass();
@ -110,7 +106,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.02 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 1, GC.time: 0.01,
// time.total: 0.35, time.warmup: 0.05, time.bench: 0.30
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_Basic() throws Exception {
final String query = "select * from join_view_1";
starRocksAssert.query(query).explainContains("mv_agg_join_1");
@ -119,7 +117,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.01 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.36, time.warmup: 0.06, time.bench: 0.30
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_Basic_Disable() throws Exception {
connectContext.getSessionVariable().setEnableViewBasedMvRewrite(false);
final String query = "select * from join_view_1";
@ -130,7 +130,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.01 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.29, time.warmup: 0.11, time.bench: 0.18
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_ViewBased_VS_Spjg() throws Exception {
final String query = "select * from t0_view_1";
starRocksAssert.query(query).explainContains("mv_agg_1");
@ -139,7 +141,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.01 [+- 0.00], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 0, GC.time: 0.00,
// time.total: 0.19, time.warmup: 0.03, time.bench: 0.16
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_ViewBased_VS_Spjg_DisableView() throws Exception {
connectContext.getSessionVariable().setEnableViewBasedMvRewrite(false);
final String query = "select * from t0_view_1";
@ -149,7 +153,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.03 [+- 0.01], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 1, GC.time: 0.01,
// time.total: 0.74, time.warmup: 0.12, time.bench: 0.62
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_ViewBased_withManyMvs() throws Exception {
final String query = "select * from join_view_2";
starRocksAssert.query(query).explainContains("mv_candidate_join_");
@ -158,7 +164,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.02 [+- 0.01], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 1, GC.time: 0.02,
// time.total: 1.05, time.warmup: 0.67, time.bench: 0.38
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_ViewBased_withManyMvs_Disable() throws Exception {
connectContext.getSessionVariable().setEnableViewBasedMvRewrite(false);
final String query = "select * from join_view_2";
@ -168,7 +176,9 @@ public class ViewBasedMvRewritePerfTest extends MVTestBase {
// round: 0.03 [+- 0.01], round.block: 0.00 [+- 0.00], round.gc: 0.00 [+- 0.00], GC.calls: 1, GC.time: 0.02,
// time.total: 1.08, time.warmup: 0.52, time.bench: 0.56
@Test
@BenchmarkOptions(warmupRounds = 3, benchmarkRounds = 20)
@Benchmark
@Warmup(iterations = 3)
@Measurement(iterations = 20)
public void testViewBaseRewrite_ViewBased_withManyMvs_join() throws Exception {
final String query = "select v1.v1, total1, total2 " +
"from t2_view_1 v1 join t2_view_2 v2 " +

View File

@ -18,7 +18,7 @@
package com.starrocks.common;
import com.google.common.base.Strings;
import junit.framework.AssertionFailedError;
import org.opentest4j.AssertionFailedError;
public class ExceptionChecker {

View File

@ -37,9 +37,9 @@ import mockit.Expectations;
import mockit.Mock;
import mockit.MockUp;
import mockit.Mocked;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.concurrent.CompletableFuture;
@ -54,7 +54,7 @@ public class PulsarUtilTest {
@Mocked
BackendServiceClient client;
@Before
@BeforeEach
public void before() throws StarRocksException {
new MockUp<RunMode>() {
@Mock
@ -84,8 +84,8 @@ public class PulsarUtilTest {
PulsarRoutineLoadJob job = new PulsarRoutineLoadJob(1L, "routine_load", 1L, 1L,
"http://pulsar-service", "topic1", "sub1");
Deencapsulation.setField(job, "convertedCustomProperties", ImmutableMap.of("key1", "value1"));
LoadException e = Assert.assertThrows(LoadException.class, () -> job.getAllPulsarPartitions());
Assert.assertTrue(e.getMessage().contains("No alive backends or computeNodes"));
LoadException e = Assertions.assertThrows(LoadException.class, () -> job.getAllPulsarPartitions());
Assertions.assertTrue(e.getMessage().contains("No alive backends or computeNodes"));
}
@Test
@ -116,6 +116,6 @@ public class PulsarUtilTest {
Deencapsulation.setField(job, "convertedCustomProperties", ImmutableMap.of("key1", "value1"));
List<String> result = job.getAllPulsarPartitions();
Assert.assertEquals(partitions, result);
Assertions.assertEquals(partitions, result);
}
}

View File

@ -20,7 +20,7 @@ import com.starrocks.catalog.Column;
import com.starrocks.catalog.Type;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import java.nio.ByteBuffer;
import java.util.HashMap;

View File

@ -30,14 +30,13 @@ import com.starrocks.thrift.TExplainLevel;
import com.starrocks.utframe.StarRocksAssert;
import com.starrocks.utframe.UtFrameUtils;
import org.apache.commons.lang3.StringUtils;
import org.junit.Rule;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.rules.ErrorCollector;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@ -47,8 +46,8 @@ public class TrinoTestBase {
public static ConnectContext connectContext;
public static StarRocksAssert starRocksAssert;
@Rule
public ErrorCollector collector = new ErrorCollector();
// Store test failures to be reported at the end of the test
private final List<Throwable> testFailures = new ArrayList<>();
@BeforeAll
public static void beforeClass() throws Exception {
@ -154,6 +153,20 @@ public class TrinoTestBase {
connectContext.getSessionVariable().setCboPushDownGroupingSet(false);
}
// Method to collect errors (replacement for ErrorCollector)
protected void addError(Throwable error) {
testFailures.add(error);
}
// Method to verify no errors were collected (call at the end of test methods)
protected void verifyNoErrors() {
if (!testFailures.isEmpty()) {
Throwable firstError = testFailures.get(0);
testFailures.clear();
Assertions.fail("Test failures: " + firstError.getMessage(), firstError);
}
}
public static StatementBase analyzeSuccess(String originStmt) {
try {
StatementBase statementBase = com.starrocks.sql.parser.SqlParser.parse(originStmt,
@ -288,7 +301,7 @@ public class TrinoTestBase {
checkWithIgnoreTabletList(result.toString().trim(), pair.first.trim());
}
} catch (Error error) {
collector.addError(new Throwable(nth + " plan " + "\n" + sql, error));
addError(new Throwable(nth + " plan " + "\n" + sql, error));
}
hasResult = false;
@ -305,6 +318,9 @@ public class TrinoTestBase {
break;
}
}
// Verify no errors were collected at the end of the test
// NOTE: TrinoTPCHTest currently has known failures; enable this verifyNoErrors() call once TrinoTPCHTest is fixed
// verifyNoErrors();
} catch (Exception e) {
System.out.println(sql);
e.printStackTrace();

View File

@ -85,7 +85,6 @@ import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import junit.framework.AssertionFailedError;
import mockit.Expectations;
import mockit.Mock;
import mockit.MockUp;
@ -99,6 +98,7 @@ import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.opentest4j.AssertionFailedError;
import java.io.IOException;
import java.net.ServerSocket;

View File

@ -37,6 +37,8 @@ import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import java.util.List;
import java.util.Locale;
@ -44,6 +46,7 @@ import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@State(Scope.Thread) // JMH requires a @State scope on benchmark classes; Thread scope gives each worker its own instance
public class MaterializedViewTestBase extends PlanTestBase {
protected static final Logger LOG = LogManager.getLogger(MaterializedViewTestBase.class);

View File

@ -23,24 +23,25 @@ import com.starrocks.sql.optimizer.transformer.LogicalPlan;
import com.starrocks.sql.optimizer.transformer.RelationTransformer;
import com.starrocks.utframe.StarRocksAssert;
import com.starrocks.utframe.UtFrameUtils;
import org.junit.Rule;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.rules.ErrorCollector;
import org.opentest4j.AssertionFailedError;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class TransformerTest {
private static ConnectContext connectContext;
private static StarRocksAssert starRocksAssert;
private static String DB_NAME = "test";
@Rule
public ErrorCollector collector = new ErrorCollector();
// Using List to collect errors instead of JUnit 4 ErrorCollector
private final List<Throwable> errors = new ArrayList<>();
@BeforeAll
public static void beforeClass() throws Exception {
@ -137,8 +138,7 @@ public class TransformerTest {
runUnitTest("pivot");
}
public static void analyzeAndBuildOperator(String originStmt, String operatorString, String except,
ErrorCollector collector) {
public void analyzeAndBuildOperator(String originStmt, String operatorString, String except) {
try {
StatementBase statementBase = com.starrocks.sql.parser.SqlParser.parse(originStmt,
connectContext.getSessionVariable().getSqlMode()).get(0);
@ -150,8 +150,8 @@ public class TransformerTest {
try {
Assertions.assertEquals(operatorString.substring(0, operatorString.length() - 1),
LogicalPlanPrinter.print(logicalPlan.getRoot()));
} catch (Error error) {
collector.addError(new Throwable("\n" + originStmt, error));
} catch (AssertionFailedError error) {
errors.add(new Throwable("\n" + originStmt, error));
}
} catch (Exception ex) {
if (!except.isEmpty()) {
@ -201,7 +201,7 @@ public class TransformerTest {
mode = "except";
continue;
} else if (tempStr.equals("[end]")) {
analyzeAndBuildOperator(sql, result, except, collector);
analyzeAndBuildOperator(sql, result, except);
continue;
}
@ -214,6 +214,14 @@ public class TransformerTest {
}
}
reader.close();
// Report any collected errors after test completion (JUnit 5 style)
if (!errors.isEmpty()) {
AssertionFailedError error = new AssertionFailedError(
"There were " + errors.size() + " errors in test " + filename);
errors.forEach(error::addSuppressed);
throw error;
}
} catch (IOException e) {
e.printStackTrace();
} finally {

View File

@ -74,8 +74,8 @@
<artifactId>RoaringBitmap</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>

View File

@ -17,9 +17,9 @@
package com.starrocks.types;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@ -29,7 +29,7 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class BitmapValueTest {
static BitmapValue emptyBitmap;
@ -37,7 +37,7 @@ public class BitmapValueTest {
static BitmapValue mediumBitmap;
static BitmapValue largeBitmap;
@BeforeClass
@BeforeAll
public static void beforeClass() throws Exception {
emptyBitmap = new BitmapValue();
singleBitmap = new BitmapValue();
@ -54,21 +54,21 @@ public class BitmapValueTest {
}
private void checkBitmap(BitmapValue bitmap, int bitmapType, long start, long end) {
Assert.assertEquals(bitmap.getBitmapType(), bitmapType);
Assert.assertEquals(bitmap.cardinality(), end - start);
Assertions.assertEquals(bitmap.getBitmapType(), bitmapType);
Assertions.assertEquals(bitmap.cardinality(), end - start);
for (long i = start; i < end; i++) {
Assert.assertTrue(bitmap.contains(i));
Assertions.assertTrue(bitmap.contains(i));
}
}
private void checkBitmap(BitmapValue bitmap, int bitmapType, long start1, long end1, long start2, long end2) {
Assert.assertEquals(bitmap.getBitmapType(), bitmapType);
Assert.assertEquals(bitmap.cardinality(), (end1 - start1) + (end2 - start2));
Assertions.assertEquals(bitmap.getBitmapType(), bitmapType);
Assertions.assertEquals(bitmap.cardinality(), (end1 - start1) + (end2 - start2));
for (long i = start1; i < end1; i++) {
Assert.assertTrue(bitmap.contains(i));
Assertions.assertTrue(bitmap.contains(i));
}
for (long i = start2; i < end2; i++) {
Assert.assertTrue(bitmap.contains(i));
Assertions.assertTrue(bitmap.contains(i));
}
}
@ -490,7 +490,7 @@ public class BitmapValueTest {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(outputStream);
emptyBitmap.serialize(output);
Assert.assertEquals("[0]", Arrays.toString(outputStream.toByteArray()));
Assertions.assertEquals("[0]", Arrays.toString(outputStream.toByteArray()));
DataInputStream inputStream = new DataInputStream(new ByteArrayInputStream(outputStream.toByteArray()));
BitmapValue outputBitmap = new BitmapValue();
@ -503,7 +503,7 @@ public class BitmapValueTest {
output = new DataOutputStream(outputStream);
inputBitmap.serialize(output);
// check serialize by little endian
Assert.assertEquals("[1, 0, 0, 0, -128]", Arrays.toString(outputStream.toByteArray()));
Assertions.assertEquals("[1, 0, 0, 0, -128]", Arrays.toString(outputStream.toByteArray()));
inputStream = new DataInputStream(new ByteArrayInputStream(outputStream.toByteArray()));
outputBitmap = new BitmapValue();
@ -518,7 +518,7 @@ public class BitmapValueTest {
output = new DataOutputStream(outputStream);
inputBitmap.serialize(output);
// check serialize by little endian
Assert.assertEquals("[3, 1, 0, 0, 0, 1, 0, 0, 0]", Arrays.toString(outputStream.toByteArray()));
Assertions.assertEquals("[3, 1, 0, 0, 0, 1, 0, 0, 0]", Arrays.toString(outputStream.toByteArray()));
inputStream = new DataInputStream(new ByteArrayInputStream(outputStream.toByteArray()));
outputBitmap = new BitmapValue();
@ -530,7 +530,7 @@ public class BitmapValueTest {
outputStream = new ByteArrayOutputStream();
output = new DataOutputStream(outputStream);
inputBitmap.serialize(output);
Assert.assertEquals("[10, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0]",
Assertions.assertEquals("[10, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0]",
Arrays.toString(outputStream.toByteArray()));
// bitmap
@ -571,10 +571,10 @@ public class BitmapValueTest {
long unsigned32bit = Integer.MAX_VALUE;
bitmapValue.add(unsigned32bit + 1);
Assert.assertTrue(bitmapValue.is32BitsEnough());
Assertions.assertTrue(bitmapValue.is32BitsEnough());
bitmapValue.add(Long.MAX_VALUE);
Assert.assertFalse(bitmapValue.is32BitsEnough());
Assertions.assertFalse(bitmapValue.is32BitsEnough());
}
@Test
@ -602,19 +602,19 @@ public class BitmapValueTest {
@Test
public void testContains() {
// empty
Assert.assertFalse(emptyBitmap.contains(1));
Assertions.assertFalse(emptyBitmap.contains(1));
// single value
Assert.assertTrue(singleBitmap.contains(1));
Assert.assertFalse(singleBitmap.contains(2));
Assertions.assertTrue(singleBitmap.contains(1));
Assertions.assertFalse(singleBitmap.contains(2));
// bitmap
Assert.assertTrue(largeBitmap.contains(1));
Assert.assertFalse(largeBitmap.contains(100));
Assertions.assertTrue(largeBitmap.contains(1));
Assertions.assertFalse(largeBitmap.contains(100));
// set
Assert.assertTrue(mediumBitmap.contains(1));
Assert.assertFalse(mediumBitmap.contains(20));
Assertions.assertTrue(mediumBitmap.contains(1));
Assertions.assertFalse(mediumBitmap.contains(20));
}
@Test
@ -626,37 +626,37 @@ public class BitmapValueTest {
// empty == single value
emp2.add(1);
Assert.assertNotEquals(emp1, emp2);
Assertions.assertNotEquals(emp1, emp2);
// empty == bitmap
emp2.add(2);
Assert.assertNotEquals(emp1, emp2);
Assertions.assertNotEquals(emp1, emp2);
// single value = empty
BitmapValue sgv = new BitmapValue();
sgv.add(1);
BitmapValue emp3 = new BitmapValue();
Assert.assertNotEquals(sgv, emp3);
Assertions.assertNotEquals(sgv, emp3);
// single value = single value
BitmapValue sgv1 = new BitmapValue();
sgv1.add(1);
BitmapValue sgv2 = new BitmapValue();
sgv2.add(2);
assertEquals(sgv, sgv1);
Assert.assertNotEquals(sgv, sgv2);
Assertions.assertNotEquals(sgv, sgv2);
// single value = bitmap
sgv2.add(3);
Assert.assertNotEquals(sgv, sgv2);
Assertions.assertNotEquals(sgv, sgv2);
// bitmap == empty
BitmapValue bitmapValue = new BitmapValue();
bitmapValue.add(1);
bitmapValue.add(2);
BitmapValue emp4 = new BitmapValue();
Assert.assertNotEquals(bitmapValue, emp4);
Assertions.assertNotEquals(bitmapValue, emp4);
// bitmap == singlevalue
BitmapValue sgv3 = new BitmapValue();
sgv3.add(1);
Assert.assertNotEquals(bitmapValue, sgv3);
Assertions.assertNotEquals(bitmapValue, sgv3);
// bitmap == bitmap
BitmapValue bitmapValue1 = new BitmapValue();
bitmapValue1.add(1);
@ -664,24 +664,24 @@ public class BitmapValueTest {
bitmapValue2.add(1);
bitmapValue2.add(2);
assertEquals(bitmapValue, bitmapValue2);
Assert.assertNotEquals(bitmapValue, bitmapValue1);
Assertions.assertNotEquals(bitmapValue, bitmapValue1);
}
@Test
public void testToString() {
Assert.assertEquals(emptyBitmap.toString(), "{}");
Assert.assertEquals(singleBitmap.toString(), "{1}");
Assert.assertEquals(mediumBitmap.toString(), "{0,1,2,3,4,5,6,7,8,9}");
Assert.assertEquals(largeBitmap.toString(), "{0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22," +
Assertions.assertEquals(emptyBitmap.toString(), "{}");
Assertions.assertEquals(singleBitmap.toString(), "{1}");
Assertions.assertEquals(mediumBitmap.toString(), "{0,1,2,3,4,5,6,7,8,9}");
Assertions.assertEquals(largeBitmap.toString(), "{0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22," +
"23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39}");
}
@Test
public void testSerializeToString() {
Assert.assertEquals(emptyBitmap.serializeToString(), "");
Assert.assertEquals(singleBitmap.serializeToString(), "1");
Assert.assertEquals(mediumBitmap.serializeToString(), "0,1,2,3,4,5,6,7,8,9");
Assert.assertEquals(largeBitmap.serializeToString(), "0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20," +
Assertions.assertEquals(emptyBitmap.serializeToString(), "");
Assertions.assertEquals(singleBitmap.serializeToString(), "1");
Assertions.assertEquals(mediumBitmap.serializeToString(), "0,1,2,3,4,5,6,7,8,9");
Assertions.assertEquals(largeBitmap.serializeToString(), "0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20," +
"21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39");
BitmapValue bitmap = new BitmapValue();
@ -691,6 +691,6 @@ public class BitmapValueTest {
bitmap.add(100);
bitmap.add(5);
bitmap.add(102);
Assert.assertEquals(bitmap.setToString(), "1,2,3,5,100,102");
Assertions.assertEquals(bitmap.setToString(), "1,2,3,5,100,102");
}
}

View File

@ -14,14 +14,14 @@
package com.starrocks.types;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
public class Roaring64MapTest {
static BitmapValue largeBitmap;
@BeforeClass
@BeforeAll
public static void beforeClass() throws Exception {
largeBitmap = new BitmapValue();
for (long i = 0; i < 20; i++) {
@ -31,6 +31,6 @@ public class Roaring64MapTest {
@Test
public void testSerializeToString() {
Assert.assertEquals("0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19", largeBitmap.serializeToString());
Assertions.assertEquals("0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19", largeBitmap.serializeToString());
}
}

View File

@ -408,6 +408,10 @@ under the License.
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-utils</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
@ -427,6 +431,10 @@ under the License.
<groupId>com.baidu</groupId>
<artifactId>jprotobuf</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>
@ -437,11 +445,11 @@ under the License.
<version>20231013</version>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<!-- https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.1</version>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>5.8.2</version>
<scope>test</scope>
</dependency>
@ -1137,6 +1145,10 @@ under the License.
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
<exclusion>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
</exclusions>
</dependency>

View File

@ -83,10 +83,10 @@ under the License.
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<!-- https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
@ -332,7 +332,6 @@ under the License.
</execution>
</executions>
</plugin>
</plugins>
<pluginManagement>

View File

@ -18,8 +18,8 @@
package com.starrocks.load.loadv2.dpp;
import com.starrocks.load.loadv2.etl.EtlJobConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class ColumnParserTest {
@ -30,83 +30,83 @@ public class ColumnParserTest {
TinyIntParser tinyIntParser = new TinyIntParser();
// 1 normal
String tinyint = "100";
Assert.assertTrue(tinyIntParser.parse(tinyint));
Assertions.assertTrue(tinyIntParser.parse(tinyint));
// 2 upper
String tinyintUpper = "128";
Assert.assertFalse(tinyIntParser.parse(tinyintUpper));
Assertions.assertFalse(tinyIntParser.parse(tinyintUpper));
// 3 lower
String tinyintLower = "-129";
Assert.assertFalse(tinyIntParser.parse(tinyintLower));
Assertions.assertFalse(tinyIntParser.parse(tinyintLower));
// smallint
SmallIntParser smallIntParser = new SmallIntParser();
// 1 normal
String smallint = "100";
Assert.assertTrue(smallIntParser.parse(smallint));
Assertions.assertTrue(smallIntParser.parse(smallint));
// 2 upper
String smallintUpper = "32768";
Assert.assertFalse(smallIntParser.parse(smallintUpper));
Assertions.assertFalse(smallIntParser.parse(smallintUpper));
// 3 lower
String smallintLower = "-32769";
Assert.assertFalse(smallIntParser.parse(smallintLower));
Assertions.assertFalse(smallIntParser.parse(smallintLower));
// int
IntParser intParser = new IntParser();
// 1 normal
String intValue = "100";
Assert.assertTrue(intParser.parse(intValue));
Assertions.assertTrue(intParser.parse(intValue));
// 2 upper
String intUpper = "2147483648";
Assert.assertFalse(intParser.parse(intUpper));
Assertions.assertFalse(intParser.parse(intUpper));
// 3 lower
String intLower = "-2147483649";
Assert.assertFalse(intParser.parse(intLower));
Assertions.assertFalse(intParser.parse(intLower));
// bigint
BigIntParser bigIntParser = new BigIntParser();
// 1 normal
String bigint = "100";
Assert.assertTrue(bigIntParser.parse(bigint));
Assertions.assertTrue(bigIntParser.parse(bigint));
// 2 upper
String bigintUpper = "9223372036854775808";
Assert.assertFalse(bigIntParser.parse(bigintUpper));
Assertions.assertFalse(bigIntParser.parse(bigintUpper));
// 3 lower
String bigintLower = "-9223372036854775809";
Assert.assertFalse(bigIntParser.parse(bigintLower));
Assertions.assertFalse(bigIntParser.parse(bigintLower));
// largeint
LargeIntParser largeIntParser = new LargeIntParser();
// 1 normal
String largeint = "100";
Assert.assertTrue(largeIntParser.parse(largeint));
Assertions.assertTrue(largeIntParser.parse(largeint));
// 2 upper
String largeintUpper = "170141183460469231731687303715884105728";
Assert.assertFalse(largeIntParser.parse(largeintUpper));
Assertions.assertFalse(largeIntParser.parse(largeintUpper));
// 3 lower
String largeintLower = "-170141183460469231731687303715884105729";
Assert.assertFalse(largeIntParser.parse(largeintLower));
Assertions.assertFalse(largeIntParser.parse(largeintLower));
// float
FloatParser floatParser = new FloatParser();
// normal
String floatValue = "1.1";
Assert.assertTrue(floatParser.parse(floatValue));
Assertions.assertTrue(floatParser.parse(floatValue));
// inf
String inf = "Infinity";
Assert.assertFalse(floatParser.parse(inf));
Assertions.assertFalse(floatParser.parse(inf));
// nan
String nan = "NaN";
// failed
Assert.assertFalse(floatParser.parse(nan));
Assertions.assertFalse(floatParser.parse(nan));
// double
DoubleParser doubleParser = new DoubleParser();
// normal
Assert.assertTrue(doubleParser.parse(floatValue));
Assertions.assertTrue(doubleParser.parse(floatValue));
// inf
Assert.assertFalse(doubleParser.parse(inf));
Assertions.assertFalse(doubleParser.parse(inf));
// nan
Assert.assertFalse(doubleParser.parse(nan));
Assertions.assertFalse(doubleParser.parse(nan));
// decimal
EtlJobConfig.EtlColumn etlColumn = new EtlJobConfig.EtlColumn();
@ -115,10 +115,10 @@ public class ColumnParserTest {
DecimalParser decimalParser = new DecimalParser(etlColumn);
// normal
String decimalValue = "10.333";
Assert.assertTrue(decimalParser.parse(decimalValue));
Assertions.assertTrue(decimalParser.parse(decimalValue));
// overflow
String decimalOverflow = "1000.3333333333";
Assert.assertFalse(decimalParser.parse(decimalOverflow));
Assertions.assertFalse(decimalParser.parse(decimalOverflow));
// string
EtlJobConfig.EtlColumn stringColumn = new EtlJobConfig.EtlColumn();
@ -126,10 +126,10 @@ public class ColumnParserTest {
StringParser stringParser = new StringParser(stringColumn);
// normal
String stringnormal = "a";
Assert.assertTrue(stringParser.parse(stringnormal));
Assertions.assertTrue(stringParser.parse(stringnormal));
// overflow
String stringoverflow = "中文";
Assert.assertFalse(stringParser.parse(stringoverflow));
Assertions.assertFalse(stringParser.parse(stringoverflow));
}
}

View File

@ -23,8 +23,8 @@ import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.StructType;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
@ -41,28 +41,28 @@ public class DppUtilsTest {
DppUtils dppUtils = new DppUtils();
Class stringResult = dppUtils.getClassFromDataType(DataTypes.StringType);
Assert.assertEquals(String.class, stringResult);
Assertions.assertEquals(String.class, stringResult);
Class booleanResult = dppUtils.getClassFromDataType(DataTypes.BooleanType);
Assert.assertEquals(Boolean.class, booleanResult);
Assertions.assertEquals(Boolean.class, booleanResult);
Class shortResult = dppUtils.getClassFromDataType(DataTypes.ShortType);
Assert.assertEquals(Short.class, shortResult);
Assertions.assertEquals(Short.class, shortResult);
Class integerResult = dppUtils.getClassFromDataType(DataTypes.IntegerType);
Assert.assertEquals(Integer.class, integerResult);
Assertions.assertEquals(Integer.class, integerResult);
Class longResult = dppUtils.getClassFromDataType(DataTypes.LongType);
Assert.assertEquals(Long.class, longResult);
Assertions.assertEquals(Long.class, longResult);
Class floatResult = dppUtils.getClassFromDataType(DataTypes.FloatType);
Assert.assertEquals(Float.class, floatResult);
Assertions.assertEquals(Float.class, floatResult);
Class doubleResult = dppUtils.getClassFromDataType(DataTypes.DoubleType);
Assert.assertEquals(Double.class, doubleResult);
Assertions.assertEquals(Double.class, doubleResult);
Class dateResult = dppUtils.getClassFromDataType(DataTypes.DateType);
Assert.assertEquals(Date.class, dateResult);
Assertions.assertEquals(Date.class, dateResult);
}
@Test
@ -73,73 +73,73 @@ public class DppUtilsTest {
EtlJobConfig.EtlColumn column = new EtlJobConfig.EtlColumn();
column.columnType = "CHAR";
Class charResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(String.class, charResult);
Assertions.assertEquals(String.class, charResult);
column.columnType = "HLL";
Class hllResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(String.class, hllResult);
Assertions.assertEquals(String.class, hllResult);
column.columnType = "OBJECT";
Class objectResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(String.class, objectResult);
Assertions.assertEquals(String.class, objectResult);
column.columnType = "BOOLEAN";
Class booleanResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Boolean.class, booleanResult);
Assertions.assertEquals(Boolean.class, booleanResult);
column.columnType = "TINYINT";
Class tinyResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Short.class, tinyResult);
Assertions.assertEquals(Short.class, tinyResult);
column.columnType = "SMALLINT";
Class smallResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Short.class, smallResult);
Assertions.assertEquals(Short.class, smallResult);
column.columnType = "INT";
Class integerResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Integer.class, integerResult);
Assertions.assertEquals(Integer.class, integerResult);
column.columnType = "DATETIME";
Class datetimeResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(java.sql.Timestamp.class, datetimeResult);
Assertions.assertEquals(java.sql.Timestamp.class, datetimeResult);
column.columnType = "FLOAT";
Class floatResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Float.class, floatResult);
Assertions.assertEquals(Float.class, floatResult);
column.columnType = "DOUBLE";
Class doubleResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Double.class, doubleResult);
Assertions.assertEquals(Double.class, doubleResult);
column.columnType = "DATE";
Class dateResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(Date.class, dateResult);
Assertions.assertEquals(Date.class, dateResult);
column.columnType = "DECIMALV2";
column.precision = 10;
column.scale = 2;
Class decimalResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(BigDecimal.valueOf(10, 2).getClass(), decimalResult);
Assertions.assertEquals(BigDecimal.valueOf(10, 2).getClass(), decimalResult);
column.columnType = "DECIMAL32";
column.precision = 7;
column.scale = 2;
decimalResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(BigDecimal.valueOf(7, 2).getClass(), decimalResult);
Assertions.assertEquals(BigDecimal.valueOf(7, 2).getClass(), decimalResult);
column.columnType = "DECIMAL64";
column.precision = 15;
column.scale = 3;
decimalResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(BigDecimal.valueOf(15, 3).getClass(), decimalResult);
Assertions.assertEquals(BigDecimal.valueOf(15, 3).getClass(), decimalResult);
column.columnType = "DECIMAL128";
column.precision = 34;
column.scale = 4;
decimalResult = dppUtils.getClassFromColumn(column);
Assert.assertEquals(BigDecimal.valueOf(34, 4).getClass(), decimalResult);
Assertions.assertEquals(BigDecimal.valueOf(34, 4).getClass(), decimalResult);
} catch (Exception e) {
Assert.assertFalse(false);
Assertions.assertFalse(false);
}
}
@ -152,75 +152,75 @@ public class DppUtilsTest {
EtlJobConfig.EtlColumn column = new EtlJobConfig.EtlColumn();
column.columnType = "VARCHAR";
DataType stringResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.StringType, stringResult);
Assertions.assertEquals(DataTypes.StringType, stringResult);
column.columnType = "CHAR";
DataType charResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.StringType, charResult);
Assertions.assertEquals(DataTypes.StringType, charResult);
column.columnType = "HLL";
DataType hllResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.StringType, hllResult);
Assertions.assertEquals(DataTypes.StringType, hllResult);
column.columnType = "OBJECT";
DataType objectResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.StringType, objectResult);
Assertions.assertEquals(DataTypes.StringType, objectResult);
column.columnType = "BOOLEAN";
DataType booleanResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.StringType, booleanResult);
Assertions.assertEquals(DataTypes.StringType, booleanResult);
column.columnType = "TINYINT";
DataType tinyResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.ByteType, tinyResult);
Assertions.assertEquals(DataTypes.ByteType, tinyResult);
column.columnType = "SMALLINT";
DataType smallResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.ShortType, smallResult);
Assertions.assertEquals(DataTypes.ShortType, smallResult);
column.columnType = "INT";
DataType integerResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.IntegerType, integerResult);
Assertions.assertEquals(DataTypes.IntegerType, integerResult);
column.columnType = "BIGINT";
DataType longResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.LongType, longResult);
Assertions.assertEquals(DataTypes.LongType, longResult);
column.columnType = "DATETIME";
DataType datetimeResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.TimestampType, datetimeResult);
Assertions.assertEquals(DataTypes.TimestampType, datetimeResult);
column.columnType = "FLOAT";
DataType floatResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.FloatType, floatResult);
Assertions.assertEquals(DataTypes.FloatType, floatResult);
column.columnType = "DOUBLE";
DataType doubleResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.DoubleType, doubleResult);
Assertions.assertEquals(DataTypes.DoubleType, doubleResult);
column.columnType = "DATE";
DataType dateResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DataTypes.DateType, dateResult);
Assertions.assertEquals(DataTypes.DateType, dateResult);
column.columnType = "DECIMAL32";
column.precision = 7;
column.scale = 2;
DataType decimalResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DecimalType.apply(7, 2), decimalResult);
Assertions.assertEquals(DecimalType.apply(7, 2), decimalResult);
column.columnType = "DECIMAL64";
column.precision = 15;
column.scale = 3;
decimalResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DecimalType.apply(15, 3), decimalResult);
Assertions.assertEquals(DecimalType.apply(15, 3), decimalResult);
column.columnType = "DECIMAL128";
column.precision = 34;
column.scale = 4;
decimalResult = dppUtils.getDataTypeFromColumn(column, false);
Assert.assertEquals(DecimalType.apply(34, 4), decimalResult);
Assertions.assertEquals(DecimalType.apply(34, 4), decimalResult);
} catch (Exception e) {
Assert.assertTrue(false);
Assertions.assertTrue(false);
}
}
@ -244,17 +244,17 @@ public class DppUtilsTest {
try {
StructType schema = dppUtils.createDstTableSchema(columns, false, false);
Assert.assertEquals(2, schema.fieldNames().length);
Assert.assertEquals("column1", schema.fieldNames()[0]);
Assert.assertEquals("column2", schema.fieldNames()[1]);
Assertions.assertEquals(2, schema.fieldNames().length);
Assertions.assertEquals("column1", schema.fieldNames()[0]);
Assertions.assertEquals("column2", schema.fieldNames()[1]);
StructType schema2 = dppUtils.createDstTableSchema(columns, true, false);
Assert.assertEquals(3, schema2.fieldNames().length);
Assert.assertEquals("__bucketId__", schema2.fieldNames()[0]);
Assert.assertEquals("column1", schema2.fieldNames()[1]);
Assert.assertEquals("column2", schema2.fieldNames()[2]);
Assertions.assertEquals(3, schema2.fieldNames().length);
Assertions.assertEquals("__bucketId__", schema2.fieldNames()[0]);
Assertions.assertEquals("column1", schema2.fieldNames()[1]);
Assertions.assertEquals("column2", schema2.fieldNames()[2]);
} catch (Exception e) {
Assert.assertTrue(false);
Assertions.assertTrue(false);
}
}
@ -268,11 +268,11 @@ public class DppUtilsTest {
columnFromPaths.add("date");
try {
List<String> columnFromPathValues = dppUtils.parseColumnsFromPath(path, columnFromPaths);
Assert.assertEquals(2, columnFromPathValues.size());
Assert.assertEquals("beijing", columnFromPathValues.get(0));
Assert.assertEquals("2020-04-10", columnFromPathValues.get(1));
Assertions.assertEquals(2, columnFromPathValues.size());
Assertions.assertEquals("beijing", columnFromPathValues.get(0));
Assertions.assertEquals("2020-04-10", columnFromPathValues.get(1));
} catch (Exception e) {
Assert.assertTrue(false);
Assertions.assertTrue(false);
}
}
@ -285,99 +285,99 @@ public class DppUtilsTest {
bf = DppUtils.getHashValue(null, DataTypes.IntegerType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(558161692L, hashValue.getValue());
Assertions.assertEquals(558161692L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "TINYINT", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(new Byte((byte) 1), DataTypes.ByteType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(2768625435L, hashValue.getValue());
Assertions.assertEquals(2768625435L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "SMALLINT", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(new Short((short) 1), DataTypes.ShortType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(1489118142L, hashValue.getValue());
Assertions.assertEquals(1489118142L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "INT", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(new Integer(1), DataTypes.IntegerType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(2583214201L, hashValue.getValue());
Assertions.assertEquals(2583214201L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "BIGINT", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(new Long(1), DataTypes.LongType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(2844319735L, hashValue.getValue());
Assertions.assertEquals(2844319735L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "VARCHAR", true, true, "NONE", "0", 100, 0, 0);
bf = DppUtils.getHashValue("12345abcde", DataTypes.StringType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(603981213L, hashValue.getValue());
Assertions.assertEquals(603981213L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "BOOLEAN", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(new Boolean(true), DataTypes.BooleanType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(2768625435L, hashValue.getValue());
Assertions.assertEquals(2768625435L, hashValue.getValue());
// date
column = new EtlJobConfig.EtlColumn("k1", "DATE", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(Date.valueOf("2023-07-11"), DataTypes.DateType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(791129379L, hashValue.getValue());
Assertions.assertEquals(791129379L, hashValue.getValue());
// datetime
column = new EtlJobConfig.EtlColumn("k1", "DATETIME", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(Timestamp.valueOf("2023-07-11 12:12:12"), DataTypes.TimestampType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(541277948L, hashValue.getValue());
Assertions.assertEquals(541277948L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "DATETIME", true, true, "NONE", "0", 0, 0, 0);
bf = DppUtils.getHashValue(Timestamp.valueOf("2023-07-11 12:12:12.123456"), DataTypes.TimestampType, column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(2559661226L, hashValue.getValue());
Assertions.assertEquals(2559661226L, hashValue.getValue());
// decimal
column = new EtlJobConfig.EtlColumn("k1", "DECIMALV2", true, true, "NONE", "0", 0, 27, 9);
bf = DppUtils.getHashValue(new BigDecimal("1.234"), DecimalType.apply(27, 9), column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(3199857971L, hashValue.getValue());
Assertions.assertEquals(3199857971L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "DECIMALV2", true, true, "NONE", "0", 0, 20, 3);
bf = DppUtils.getHashValue(new BigDecimal("1.234"), DecimalType.apply(20, 3), column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(3199857971L, hashValue.getValue());
Assertions.assertEquals(3199857971L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "DECIMAL32", true, true, "NONE", "0", 0, 9, 3);
bf = DppUtils.getHashValue(new BigDecimal("1.234"), DecimalType.apply(9, 3), column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(2853177042L, hashValue.getValue());
Assertions.assertEquals(2853177042L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "DECIMAL64", true, true, "NONE", "0", 0, 18, 6);
bf = DppUtils.getHashValue(new BigDecimal("1.234"), DecimalType.apply(18, 6), column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(563521641L, hashValue.getValue());
Assertions.assertEquals(563521641L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "DECIMAL128", true, true, "NONE", "0", 0, 38, 9);
bf = DppUtils.getHashValue(new BigDecimal("1.234"), DecimalType.apply(38, 9), column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(4291267310L, hashValue.getValue());
Assertions.assertEquals(4291267310L, hashValue.getValue());
column = new EtlJobConfig.EtlColumn("k1", "DECIMAL128", true, true, "NONE", "0", 0, 27, 9);
bf = DppUtils.getHashValue(new BigDecimal("1.234"), DecimalType.apply(27, 9), column);
hashValue.reset();
hashValue.update(bf.array(), 0, bf.limit());
Assert.assertEquals(3199857971L, hashValue.getValue());
Assertions.assertEquals(3199857971L, hashValue.getValue());
}
}

View File

@ -17,8 +17,8 @@
package com.starrocks.load.loadv2.dpp;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@ -31,10 +31,10 @@ public class HllTest {
@Test
public void testFindFirstNonZeroBitPosition() {
Assert.assertTrue(Hll.getLongTailZeroNum(0) == 0);
Assert.assertTrue(Hll.getLongTailZeroNum(1) == 0);
Assert.assertTrue(Hll.getLongTailZeroNum(1l << 30) == 30);
Assert.assertTrue(Hll.getLongTailZeroNum(1l << 62) == 62);
Assertions.assertTrue(Hll.getLongTailZeroNum(0) == 0);
Assertions.assertTrue(Hll.getLongTailZeroNum(1) == 0);
Assertions.assertTrue(Hll.getLongTailZeroNum(1L << 30) == 30);
Assertions.assertTrue(Hll.getLongTailZeroNum(1L << 62) == 62);
}
@Test
@ -42,8 +42,8 @@ public class HllTest {
// test empty
Hll emptyHll = new Hll();
Assert.assertTrue(emptyHll.getType() == Hll.HLL_DATA_EMPTY);
Assert.assertTrue(emptyHll.estimateCardinality() == 0);
Assertions.assertTrue(emptyHll.getType() == Hll.HLL_DATA_EMPTY);
Assertions.assertTrue(emptyHll.estimateCardinality() == 0);
ByteArrayOutputStream emptyOutputStream = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(emptyOutputStream);
@ -52,15 +52,15 @@ public class HllTest {
new DataInputStream(new ByteArrayInputStream(emptyOutputStream.toByteArray()));
Hll deserializedEmptyHll = new Hll();
deserializedEmptyHll.deserialize(emptyInputStream);
Assert.assertTrue(deserializedEmptyHll.getType() == Hll.HLL_DATA_EMPTY);
Assertions.assertTrue(deserializedEmptyHll.getType() == Hll.HLL_DATA_EMPTY);
// test explicit
Hll explicitHll = new Hll();
for (int i = 0; i < Hll.HLL_EXPLICLIT_INT64_NUM; i++) {
explicitHll.updateWithHash(i);
}
Assert.assertTrue(explicitHll.getType() == Hll.HLL_DATA_EXPLICIT);
Assert.assertTrue(explicitHll.estimateCardinality() == Hll.HLL_EXPLICLIT_INT64_NUM);
Assertions.assertTrue(explicitHll.getType() == Hll.HLL_DATA_EXPLICIT);
Assertions.assertTrue(explicitHll.estimateCardinality() == Hll.HLL_EXPLICLIT_INT64_NUM);
ByteArrayOutputStream explicitOutputStream = new ByteArrayOutputStream();
DataOutput explicitOutput = new DataOutputStream(explicitOutputStream);
@ -69,16 +69,16 @@ public class HllTest {
new DataInputStream(new ByteArrayInputStream(explicitOutputStream.toByteArray()));
Hll deserializedExplicitHll = new Hll();
deserializedExplicitHll.deserialize(explicitInputStream);
Assert.assertTrue(deserializedExplicitHll.getType() == Hll.HLL_DATA_EXPLICIT);
Assertions.assertTrue(deserializedExplicitHll.getType() == Hll.HLL_DATA_EXPLICIT);
// test sparse
Hll sparseHll = new Hll();
for (int i = 0; i < Hll.HLL_SPARSE_THRESHOLD; i++) {
sparseHll.updateWithHash(i);
}
Assert.assertTrue(sparseHll.getType() == Hll.HLL_DATA_FULL);
Assertions.assertTrue(sparseHll.getType() == Hll.HLL_DATA_FULL);
// 2% error rate
Assert.assertTrue(sparseHll.estimateCardinality() > Hll.HLL_SPARSE_THRESHOLD * (1 - 0.02) &&
Assertions.assertTrue(sparseHll.estimateCardinality() > Hll.HLL_SPARSE_THRESHOLD * (1 - 0.02) &&
sparseHll.estimateCardinality() < Hll.HLL_SPARSE_THRESHOLD * (1 + 0.02));
ByteArrayOutputStream sparseOutputStream = new ByteArrayOutputStream();
@ -88,18 +88,18 @@ public class HllTest {
new DataInputStream(new ByteArrayInputStream(sparseOutputStream.toByteArray()));
Hll deserializedSparseHll = new Hll();
deserializedSparseHll.deserialize(sparseInputStream);
Assert.assertTrue(deserializedSparseHll.getType() == Hll.HLL_DATA_SPARSE);
Assert.assertTrue(sparseHll.estimateCardinality() == deserializedSparseHll.estimateCardinality());
Assertions.assertTrue(deserializedSparseHll.getType() == Hll.HLL_DATA_SPARSE);
Assertions.assertTrue(sparseHll.estimateCardinality() == deserializedSparseHll.estimateCardinality());
// test full
Hll fullHll = new Hll();
for (int i = 1; i <= Short.MAX_VALUE; i++) {
fullHll.updateWithHash(i);
}
Assert.assertTrue(fullHll.getType() == Hll.HLL_DATA_FULL);
Assertions.assertTrue(fullHll.getType() == Hll.HLL_DATA_FULL);
// the result 32748 is consistent with C++ 's implementation
Assert.assertTrue(fullHll.estimateCardinality() == 32748);
Assert.assertTrue(fullHll.estimateCardinality() > Short.MAX_VALUE * (1 - 0.02) &&
Assertions.assertTrue(fullHll.estimateCardinality() == 32748);
Assertions.assertTrue(fullHll.estimateCardinality() > Short.MAX_VALUE * (1 - 0.02) &&
fullHll.estimateCardinality() < Short.MAX_VALUE * (1 + 0.02));
ByteArrayOutputStream fullHllOutputStream = new ByteArrayOutputStream();
@ -109,8 +109,8 @@ public class HllTest {
new DataInputStream(new ByteArrayInputStream(fullHllOutputStream.toByteArray()));
Hll deserializedFullHll = new Hll();
deserializedFullHll.deserialize(fullHllInputStream);
Assert.assertTrue(deserializedFullHll.getType() == Hll.HLL_DATA_FULL);
Assert.assertTrue(deserializedFullHll.estimateCardinality() == fullHll.estimateCardinality());
Assertions.assertTrue(deserializedFullHll.getType() == Hll.HLL_DATA_FULL);
Assertions.assertTrue(deserializedFullHll.estimateCardinality() == fullHll.estimateCardinality());
}
@ -125,7 +125,7 @@ public class HllTest {
byte[] serializedByte = serializeHll(hll);
hll = deserializeHll(serializedByte);
Assert.assertTrue(estimateValue == hll.estimateCardinality());
Assertions.assertTrue(estimateValue == hll.estimateCardinality());
}
// explicit [0. 100)
@ -134,11 +134,11 @@ public class HllTest {
for (int i = 0; i < 100; i++) {
explicitHll.updateWithHash(i);
}
Assert.assertTrue(explicitHll.estimateCardinality() == 100);
Assertions.assertTrue(explicitHll.estimateCardinality() == 100);
// check serialize
byte[] serializeHll = serializeHll(explicitHll);
explicitHll = deserializeHll(serializeHll);
Assert.assertTrue(explicitHll.estimateCardinality() == 100);
Assertions.assertTrue(explicitHll.estimateCardinality() == 100);
Hll otherHll = new Hll();
for (int i = 0; i < 100; i++) {
@ -146,7 +146,7 @@ public class HllTest {
}
explicitHll.merge(otherHll);
// compare with C++ version result
Assert.assertTrue(explicitHll.estimateCardinality() == 100);
Assertions.assertTrue(explicitHll.estimateCardinality() == 100);
}
// sparse [1024, 2048)
@ -159,11 +159,11 @@ public class HllTest {
long preValue = sparseHll.estimateCardinality();
// check serialize
byte[] serializedHll = serializeHll(sparseHll);
Assert.assertTrue(serializedHll.length < Hll.HLL_REGISTERS_COUNT + 1);
Assertions.assertTrue(serializedHll.length < Hll.HLL_REGISTERS_COUNT + 1);
sparseHll = deserializeHll(serializedHll);
Assert.assertTrue(sparseHll.estimateCardinality() == preValue);
Assert.assertTrue(sparseHll.getType() == Hll.HLL_DATA_SPARSE);
Assertions.assertTrue(sparseHll.estimateCardinality() == preValue);
Assertions.assertTrue(sparseHll.getType() == Hll.HLL_DATA_SPARSE);
Hll otherHll = new Hll();
for (int i = 0; i < 1024; i++) {
@ -172,11 +172,11 @@ public class HllTest {
sparseHll.updateWithHash(1024);
sparseHll.merge(otherHll);
long cardinality = sparseHll.estimateCardinality();
Assert.assertTrue(preValue == cardinality);
Assertions.assertTrue(preValue == cardinality);
// 2% error rate
Assert.assertTrue(cardinality > 1000 && cardinality < 1045);
Assertions.assertTrue(cardinality > 1000 && cardinality < 1045);
// compare with C++ version result
Assert.assertTrue(cardinality == 1023);
Assertions.assertTrue(cardinality == 1023);
}
// full [64 * 1024, 128 * 1024)
@ -190,21 +190,21 @@ public class HllTest {
// check serialize
byte[] serializedHll = serializeHll(fullHll);
fullHll = deserializeHll(serializedHll);
Assert.assertTrue(fullHll.estimateCardinality() == preValue);
Assert.assertTrue(serializedHll.length == Hll.HLL_REGISTERS_COUNT + 1);
Assertions.assertTrue(fullHll.estimateCardinality() == preValue);
Assertions.assertTrue(serializedHll.length == Hll.HLL_REGISTERS_COUNT + 1);
// 2% error rate
Assert.assertTrue(preValue > 62 * 1024 && preValue < 66 * 1024);
Assertions.assertTrue(preValue > 62 * 1024 && preValue < 66 * 1024);
// compare with C++ version result
Assert.assertTrue(preValue == 66112);
Assertions.assertTrue(preValue == 66112);
}
// merge explicit to empty_hll
{
Hll newExplicit = new Hll();
newExplicit.merge(explicitHll);
Assert.assertTrue(newExplicit.estimateCardinality() == 100);
Assertions.assertTrue(newExplicit.estimateCardinality() == 100);
// merge another explicit
{
@ -214,16 +214,16 @@ public class HllTest {
}
// this is converted to full
otherHll.merge(newExplicit);
Assert.assertTrue(otherHll.estimateCardinality() > 190);
Assertions.assertTrue(otherHll.estimateCardinality() > 190);
// compare with C++ version result
Assert.assertTrue(otherHll.estimateCardinality() == 201);
Assertions.assertTrue(otherHll.estimateCardinality() == 201);
}
// merge full
{
newExplicit.merge(fullHll);
Assert.assertTrue(newExplicit.estimateCardinality() > fullHll.estimateCardinality());
Assertions.assertTrue(newExplicit.estimateCardinality() > fullHll.estimateCardinality());
// compare with C++ version result
Assert.assertTrue(newExplicit.estimateCardinality() == 66250);
Assertions.assertTrue(newExplicit.estimateCardinality() == 66250);
}
}
@ -231,21 +231,21 @@ public class HllTest {
{
Hll newSparseHll = new Hll();
newSparseHll.merge(sparseHll);
Assert.assertTrue(sparseHll.estimateCardinality() == newSparseHll.estimateCardinality());
Assertions.assertTrue(sparseHll.estimateCardinality() == newSparseHll.estimateCardinality());
// compare with C++ version result
Assert.assertTrue(newSparseHll.estimateCardinality() == 1023);
Assertions.assertTrue(newSparseHll.estimateCardinality() == 1023);
// merge explicit
newSparseHll.merge(explicitHll);
Assert.assertTrue(newSparseHll.estimateCardinality() > sparseHll.estimateCardinality());
Assertions.assertTrue(newSparseHll.estimateCardinality() > sparseHll.estimateCardinality());
// compare with C++ version result
Assert.assertTrue(newSparseHll.estimateCardinality() == 1123);
Assertions.assertTrue(newSparseHll.estimateCardinality() == 1123);
// merge full
newSparseHll.merge(fullHll);
Assert.assertTrue(newSparseHll.estimateCardinality() > fullHll.estimateCardinality());
Assertions.assertTrue(newSparseHll.estimateCardinality() > fullHll.estimateCardinality());
// compare with C++ version result
Assert.assertTrue(newSparseHll.estimateCardinality() == 67316);
Assertions.assertTrue(newSparseHll.estimateCardinality() == 67316);
}
}

View File

@ -19,8 +19,8 @@
package com.starrocks.load.loadv2.dpp;
import com.starrocks.load.loadv2.etl.EtlJobConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
@ -95,33 +95,33 @@ public class MinimumCoverageRollupTreeBuilderTest {
MinimumCoverageRollupTreeBuilder builder = new MinimumCoverageRollupTreeBuilder();
RollupTreeNode resultNode = builder.build(table);
Assert.assertEquals(resultNode.parent, null);
Assert.assertEquals(resultNode.indexId, 10000);
Assert.assertEquals(resultNode.level, 0);
Assert.assertEquals(resultNode.children.size(), 2);
Assertions.assertEquals(resultNode.parent, null);
Assertions.assertEquals(resultNode.indexId, 10000);
Assertions.assertEquals(resultNode.level, 0);
Assertions.assertEquals(resultNode.children.size(), 2);
RollupTreeNode index1Node = resultNode.children.get(0);
Assert.assertEquals(index1Node.parent.indexId, 10000);
Assert.assertEquals(index1Node.indexId, 10001);
Assert.assertEquals(index1Node.level, 1);
Assert.assertEquals(index1Node.children.size(), 2);
Assertions.assertEquals(index1Node.parent.indexId, 10000);
Assertions.assertEquals(index1Node.indexId, 10001);
Assertions.assertEquals(index1Node.level, 1);
Assertions.assertEquals(index1Node.children.size(), 2);
RollupTreeNode index3Node = resultNode.children.get(1);
Assert.assertEquals(index3Node.parent.indexId, 10000);
Assert.assertEquals(index3Node.indexId, 10003);
Assert.assertEquals(index3Node.level, 1);
Assert.assertEquals(index3Node.children, null);
Assertions.assertEquals(index3Node.parent.indexId, 10000);
Assertions.assertEquals(index3Node.indexId, 10003);
Assertions.assertEquals(index3Node.level, 1);
Assertions.assertEquals(index3Node.children, null);
RollupTreeNode index2Node = index1Node.children.get(0);
Assert.assertEquals(index2Node.parent.indexId, 10001);
Assert.assertEquals(index2Node.indexId, 10002);
Assert.assertEquals(index2Node.level, 2);
Assert.assertEquals(index2Node.children, null);
Assertions.assertEquals(index2Node.parent.indexId, 10001);
Assertions.assertEquals(index2Node.indexId, 10002);
Assertions.assertEquals(index2Node.level, 2);
Assertions.assertEquals(index2Node.children, null);
RollupTreeNode index4Node = index1Node.children.get(1);
Assert.assertEquals(index4Node.parent.indexId, 10001);
Assert.assertEquals(index4Node.indexId, 10004);
Assert.assertEquals(index4Node.level, 2);
Assert.assertEquals(index4Node.children, null);
Assertions.assertEquals(index4Node.parent.indexId, 10001);
Assertions.assertEquals(index4Node.indexId, 10004);
Assertions.assertEquals(index4Node.level, 2);
Assertions.assertEquals(index4Node.children, null);
}
}

View File

@ -19,8 +19,8 @@ package com.starrocks.load.loadv2.dpp;
import com.starrocks.load.loadv2.etl.EtlJobConfig;
import org.apache.spark.sql.RowFactory;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
@ -38,14 +38,14 @@ public class SparkDppTest {
DecimalParser decimalParser = new DecimalParser(etlColumn);
// test max/min
Assert.assertTrue(decimalParser.getMaxValue().toString().equals("9.99"));
Assert.assertTrue(decimalParser.getMinValue().toString().equals("-9.99"));
Assertions.assertTrue(decimalParser.getMaxValue().toString().equals("9.99"));
Assertions.assertTrue(decimalParser.getMinValue().toString().equals("-9.99"));
// normal
BigDecimal bigDecimal = new BigDecimal("1.21");
Assert.assertTrue(sparkDpp.validateData(bigDecimal, etlColumn, decimalParser, RowFactory.create(bigDecimal)));
Assertions.assertTrue(sparkDpp.validateData(bigDecimal, etlColumn, decimalParser, RowFactory.create(bigDecimal)));
// failed
BigDecimal bigDecimalFailed = new BigDecimal("10");
Assert.assertFalse(
Assertions.assertFalse(
sparkDpp.validateData(bigDecimalFailed, etlColumn, decimalParser, RowFactory.create(bigDecimalFailed)));
// string
@ -55,15 +55,15 @@ public class SparkDppTest {
StringParser stringParser = new StringParser(stringColumn);
// normal
String normalString = "a1";
Assert.assertTrue(
Assertions.assertTrue(
sparkDpp.validateData(normalString, stringColumn, stringParser, RowFactory.create(normalString)));
// cn normal
String normalStringCN = "";
Assert.assertTrue(
Assertions.assertTrue(
sparkDpp.validateData(normalStringCN, stringColumn, stringParser, RowFactory.create(normalStringCN)));
// cn failed
String failedStringCN = "中a";
Assert.assertFalse(
Assertions.assertFalse(
sparkDpp.validateData(failedStringCN, stringColumn, stringParser, RowFactory.create(failedStringCN)));
}

View File

@ -15,8 +15,8 @@
package com.starrocks.load.loadv2.dpp;
import com.starrocks.types.BitmapValue;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class SparkRDDAggregatorTest {
@ -25,25 +25,25 @@ public class SparkRDDAggregatorTest {
// init null
BitmapUnionAggregator aggregator = new BitmapUnionAggregator();
BitmapValue value = aggregator.init(null);
Assert.assertEquals(BitmapValue.EMPTY, value.getBitmapType());
Assertions.assertEquals(BitmapValue.EMPTY, value.getBitmapType());
// init normal value 1
aggregator = new BitmapUnionAggregator();
value = aggregator.init(1);
Assert.assertEquals(BitmapValue.SINGLE_VALUE, value.getBitmapType());
Assert.assertEquals("{1}", value.toString());
Assertions.assertEquals(BitmapValue.SINGLE_VALUE, value.getBitmapType());
Assertions.assertEquals("{1}", value.toString());
// init byte[]
byte[] bytes = new byte[] {1, 1, 0, 0, 0};
value = aggregator.init(bytes);
Assert.assertEquals(BitmapValue.SINGLE_VALUE, value.getBitmapType());
Assert.assertEquals("{1}", value.toString());
Assertions.assertEquals(BitmapValue.SINGLE_VALUE, value.getBitmapType());
Assertions.assertEquals("{1}", value.toString());
}
@Test
public void testHllUnionAggregator() {
HllUnionAggregator aggregator = new HllUnionAggregator();
Hll value = aggregator.init(null);
Assert.assertEquals(Hll.HLL_DATA_EMPTY, value.getType());
Assertions.assertEquals(Hll.HLL_DATA_EMPTY, value.getType());
}
}

View File

@ -3,8 +3,8 @@
package com.starrocks.load.loadv2.dpp;
import com.starrocks.load.loadv2.etl.EtlJobConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Arrays;
@ -60,14 +60,14 @@ public class StarRocksListPartitionerTest {
StarRocksListPartitioner listPartitioner =
new StarRocksListPartitioner(partitionInfo, partitionKeyIndexes, partitionListKeys);
int num = listPartitioner.numPartitions();
Assert.assertEquals(3, num);
Assertions.assertEquals(3, num);
List<Object> fields1 = new ArrayList<>();
fields1.add(-100);
fields1.add("name");
DppColumns record1 = new DppColumns(fields1);
int id1 = listPartitioner.getPartition(record1);
Assert.assertEquals(-1, id1);
Assertions.assertEquals(-1, id1);
List<Object> fields2 = new ArrayList<>();
fields2.add("2023-01-01");
@ -75,7 +75,7 @@ public class StarRocksListPartitionerTest {
fields2.add("123455");
DppColumns record2 = new DppColumns(fields2);
int id2 = listPartitioner.getPartition(record2);
Assert.assertEquals(1, id2);
Assertions.assertEquals(1, id2);
List<Object> fields3 = new ArrayList<>();
fields3.add("cn");
@ -83,7 +83,7 @@ public class StarRocksListPartitionerTest {
fields3.add("123455");
DppColumns record3 = new DppColumns(fields3);
int id3 = listPartitioner.getPartition(record3);
Assert.assertEquals(-1, id3);
Assertions.assertEquals(-1, id3);
List<Object> fields4 = new ArrayList<>();
fields4.add("2022-02-01");
@ -91,7 +91,7 @@ public class StarRocksListPartitionerTest {
fields4.add("123455");
DppColumns record4 = new DppColumns(fields4);
int id4 = listPartitioner.getPartition(record4);
Assert.assertEquals(2, id4);
Assertions.assertEquals(2, id4);
}
}

View File

@ -19,8 +19,8 @@
package com.starrocks.load.loadv2.dpp;
import com.starrocks.load.loadv2.etl.EtlJobConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
@ -75,49 +75,49 @@ public class StarRocksRangePartitionerTest {
StarRocksRangePartitioner rangePartitioner =
new StarRocksRangePartitioner(partitionInfo, partitionKeyIndexes, partitionRangeKeys);
int num = rangePartitioner.numPartitions();
Assert.assertEquals(3, num);
Assertions.assertEquals(3, num);
List<Object> fields1 = new ArrayList<>();
fields1.add(-100);
fields1.add("name");
DppColumns record1 = new DppColumns(fields1);
int id1 = rangePartitioner.getPartition(record1);
Assert.assertEquals(-1, id1);
Assertions.assertEquals(-1, id1);
List<Object> fields2 = new ArrayList<>();
fields2.add(10);
fields2.add("name");
DppColumns record2 = new DppColumns(fields2);
int id2 = rangePartitioner.getPartition(record2);
Assert.assertEquals(0, id2);
Assertions.assertEquals(0, id2);
List<Object> fields3 = new ArrayList<>();
fields3.add(110);
fields3.add("name");
DppColumns record3 = new DppColumns(fields3);
int id3 = rangePartitioner.getPartition(record3);
Assert.assertEquals(1, id3);
Assertions.assertEquals(1, id3);
List<Object> fields4 = new ArrayList<>();
fields4.add(210);
fields4.add("name");
DppColumns record4 = new DppColumns(fields4);
int id4 = rangePartitioner.getPartition(record4);
Assert.assertEquals(2, id4);
Assertions.assertEquals(2, id4);
List<Object> fields5 = new ArrayList<>();
fields5.add(310);
fields5.add("name");
DppColumns record5 = new DppColumns(fields5);
int id5 = rangePartitioner.getPartition(record5);
Assert.assertEquals(-1, id5);
Assertions.assertEquals(-1, id5);
List<Object> fields6 = new ArrayList<>();
fields6.add(null);
fields6.add("name");
DppColumns record6 = new DppColumns(fields6);
int id6 = rangePartitioner.getPartition(record6);
Assert.assertEquals(-1, id6);
Assertions.assertEquals(-1, id6);
}
@Test
@ -159,14 +159,14 @@ public class StarRocksRangePartitionerTest {
StarRocksRangePartitioner rangePartitioner =
new StarRocksRangePartitioner(partitionInfo, partitionKeyIndexes, partitionRangeKeys);
int num = rangePartitioner.numPartitions();
Assert.assertEquals(2, num);
Assertions.assertEquals(2, num);
List<Object> fields1 = new ArrayList<>();
fields1.add(null);
fields1.add("name");
DppColumns record1 = new DppColumns(fields1);
int id1 = rangePartitioner.getPartition(record1);
Assert.assertEquals(0, id1);
Assertions.assertEquals(0, id1);
}
@Test
@ -183,13 +183,13 @@ public class StarRocksRangePartitionerTest {
partitionKeyIndexes.add(0);
StarRocksRangePartitioner rangePartitioner = new StarRocksRangePartitioner(partitionInfo, partitionKeyIndexes, null);
int num = rangePartitioner.numPartitions();
Assert.assertEquals(1, num);
Assertions.assertEquals(1, num);
List<Object> fields = new ArrayList<>();
fields.add(100);
fields.add("name");
DppColumns record = new DppColumns(fields);
int id = rangePartitioner.getPartition(record);
Assert.assertEquals(0, id);
Assertions.assertEquals(0, id);
}
}

View File

@ -33,9 +33,9 @@ import mockit.Injectable;
import mockit.Mocked;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SparkSession;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Map;
@ -49,7 +49,7 @@ public class SparkEtlJobTest {
private long partition2Id;
private EtlJobConfig etlJobConfig;
@Before
@BeforeEach
public void setUp() {
tableId = 0L;
index1Id = 1L;
@ -108,12 +108,12 @@ public class SparkEtlJobTest {
Deencapsulation.invoke(job, "initSparkEnvironment");
Deencapsulation.invoke(job, "initConfig");
EtlJobConfig parsedConfig = Deencapsulation.getField(job, "etlJobConfig");
Assert.assertTrue(parsedConfig.tables.containsKey(tableId));
Assertions.assertTrue(parsedConfig.tables.containsKey(tableId));
EtlTable table = parsedConfig.tables.get(tableId);
Assert.assertEquals(2, table.indexes.size());
Assert.assertEquals(2, table.partitionInfo.partitions.size());
Assert.assertEquals(false, parsedConfig.properties.strictMode);
Assert.assertEquals("label0", parsedConfig.label);
Assertions.assertEquals(2, table.indexes.size());
Assertions.assertEquals(2, table.partitionInfo.partitions.size());
Assertions.assertEquals(false, parsedConfig.properties.strictMode);
Assertions.assertEquals("label0", parsedConfig.label);
}
@Test
@ -123,7 +123,7 @@ public class SparkEtlJobTest {
Deencapsulation.invoke(job, "checkConfig");
Map<Long, Set<String>> tableToBitmapDictColumns = Deencapsulation.getField(job, "tableToBitmapDictColumns");
// check bitmap dict columns empty
Assert.assertTrue(tableToBitmapDictColumns.isEmpty());
Assertions.assertTrue(tableToBitmapDictColumns.isEmpty());
}
@Test
@ -142,12 +142,12 @@ public class SparkEtlJobTest {
Deencapsulation.invoke(job, "checkConfig");
// check hive source
Set<Long> hiveSourceTables = Deencapsulation.getField(job, "hiveSourceTables");
Assert.assertTrue(hiveSourceTables.contains(tableId));
Assertions.assertTrue(hiveSourceTables.contains(tableId));
// check bitmap dict columns has v2
Map<Long, Set<String>> tableToBitmapDictColumns = Deencapsulation.getField(job, "tableToBitmapDictColumns");
Assert.assertTrue(tableToBitmapDictColumns.containsKey(tableId));
Assert.assertTrue(tableToBitmapDictColumns.get(tableId).contains("v2"));
Assertions.assertTrue(tableToBitmapDictColumns.containsKey(tableId));
Assertions.assertTrue(tableToBitmapDictColumns.get(tableId).contains("v2"));
// check remove v2 bitmap_dict func mapping from file group column mappings
Assert.assertFalse(table.fileGroups.get(0).columnMappings.containsKey("v2"));
Assertions.assertFalse(table.fileGroups.get(0).columnMappings.containsKey("v2"));
}
}

View File

@ -47,7 +47,7 @@ under the License.
<thrift.version>0.14.1</thrift.version>
<tomcat.version>9.0.99</tomcat.version>
<log4j.version>2.17.1</log4j.version>
<jackson.version>2.13.4.2</jackson.version>
<jackson.version>2.18.3</jackson.version>
<avro.version>1.11.4</avro.version>
</properties>

View File

@ -48,7 +48,7 @@
<gcs.connector.version>hadoop3-2.2.26</gcs.connector.version>
<slf4j.version>1.7.36</slf4j.version>
<arrow.version>17.0.0</arrow.version>
<jackson.version>2.12.7.2</jackson.version>
<jackson.version>2.19.1</jackson.version>
<guava.version>32.0.1-jre</guava.version>
<!-- hadoop-azure requires no more than jetty10+ -->
<!-- https://stackoverflow.com/questions/66713254/spark-wasb-and-jetty-11 -->
@ -356,6 +356,16 @@
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- jackson -->
<!-- apache arrow -->

View File

@ -19,7 +19,6 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.13.4.2</version>
</dependency>
</dependencies>