[Refactor] Remove hardcoded version numbers in build.gradle.kts and pom.xml (#62987)

This commit is contained in:
Harbor Liu 2025-09-11 16:14:51 +08:00 committed by GitHub
parent ac256e0ec1
commit 241a3e5451
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
20 changed files with 39 additions and 43 deletions

View File

@@ -557,14 +557,14 @@ if [ ${BUILD_FE} -eq 1 -o ${BUILD_SPARK_DPP} -eq 1 ]; then
cp -r -p ${STARROCKS_HOME}/java-extensions/hadoop-ext/target/starrocks-hadoop-ext.jar ${STARROCKS_OUTPUT}/fe/lib/
cp -r -p ${STARROCKS_HOME}/webroot/* ${STARROCKS_OUTPUT}/fe/webroot/
cp -r -p ${STARROCKS_HOME}/fe/plugin/spark-dpp/target/spark-dpp-*-jar-with-dependencies.jar ${STARROCKS_OUTPUT}/fe/spark-dpp/
cp -r -p ${STARROCKS_HOME}/fe/plugin/hive-udf/target/hive-udf-1.0.0.jar ${STARROCKS_OUTPUT}/fe/hive-udf/
cp -r -p ${STARROCKS_HOME}/fe/plugin/hive-udf/target/hive-udf-*.jar ${STARROCKS_OUTPUT}/fe/hive-udf/
cp -r -p ${STARROCKS_THIRDPARTY}/installed/async-profiler ${STARROCKS_OUTPUT}/fe/bin/
MSG="${MSG}${MSG_FE}"
elif [ ${BUILD_SPARK_DPP} -eq 1 ]; then
install -d ${STARROCKS_OUTPUT}/fe/spark-dpp/
rm -rf ${STARROCKS_OUTPUT}/fe/spark-dpp/*
cp -r -p ${STARROCKS_HOME}/fe/plugin/spark-dpp/target/spark-dpp-*-jar-with-dependencies.jar ${STARROCKS_OUTPUT}/fe/spark-dpp/
cp -r -p ${STARROCKS_HOME}/fe/plugin/hive-udf/target/hive-udf-1.0.0.jar ${STARROCKS_HOME}/fe/hive-udf/
cp -r -p ${STARROCKS_HOME}/fe/plugin/hive-udf/target/hive-udf-*.jar ${STARROCKS_OUTPUT}/fe/hive-udf/
MSG="${MSG}${MSG_DPP}"
fi
fi

View File

@@ -69,18 +69,18 @@ Supported source and target data types:
./build.sh --hive-udf
```
A JAR package `hive-udf-1.0.0.jar` will be generated in the `fe/hive-udf/` directory.
A JAR package `hive-udf-*.jar` will be generated in the `fe/hive-udf/` directory.
2. Upload the JAR package to HDFS.
```bash
hadoop fs -put -f ./hive-udf-1.0.0.jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-1.0.0.jar
hadoop fs -put -f ./hive-udf-*.jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-*.jar
```
3. Load the JAR package to Hive.
```bash
hive> add jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-1.0.0.jar;
hive> add jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-*.jar;
```
4. Load UDF functions.

View File

@@ -69,18 +69,18 @@ Hive Bitmap UDF で定義された Bitmap フォーマットは、StarRocks の
./build.sh --hive-udf
```
`fe/hive-udf/` ディレクトリに JAR パッケージ `hive-udf-1.0.0.jar` が生成されます。
`fe/hive-udf/` ディレクトリに JAR パッケージ `hive-udf-*.jar` が生成されます。
2. JAR パッケージを HDFS にアップロードします。
```bash
hadoop fs -put -f ./hive-udf-1.0.0.jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-1.0.0.jar
hadoop fs -put -f ./hive-udf-*.jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-*.jar
```
3. JAR パッケージを Hive にロードします。
```bash
hive> add jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-1.0.0.jar;
hive> add jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-*.jar;
```
4. UDF 関数をロードします。

View File

@@ -69,18 +69,18 @@ UDF 定义的 Bitmap 格式与 StarRocks 里格式一致,可直接用于导入
./build.sh --hive-udf
```
会在 `fe/hive-udf/` 目录下生成一个 JAR 包 `hive-udf-1.0.0.jar`。
会在 `fe/hive-udf/` 目录下生成一个 JAR 包 `hive-udf-*.jar`。
2. 将 JAR 包上传到 HDFS。
```bash
hadoop fs -put -f ./hive-udf-1.0.0.jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-1.0.0.jar
hadoop fs -put -f ./hive-udf-*.jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-*.jar
```
3. Hive 里加载 JAR 包。
```bash
hive> add jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-1.0.0.jar;
hive> add jar hdfs://<hdfs_ip>:<hdfs_port>/hive-udf-*.jar;
```
4. 加载 UDF 函数。

View File

@@ -119,11 +119,11 @@ subprojects {
implementation("com.qcloud:chdfs_hadoop_plugin_network:3.2")
implementation("com.squareup.okhttp3:okhttp:4.10.0")
implementation("com.squareup.okio:okio:3.4.0")
implementation("com.starrocks:fe-testing:1.0.0")
implementation("com.starrocks:hive-udf:1.0.0")
implementation("com.starrocks:fe-testing:${project.version}")
implementation("com.starrocks:hive-udf:${project.version}")
implementation("com.starrocks:jprotobuf-starrocks:${project.ext["jprotobuf-starrocks.version"]}")
implementation("com.starrocks:fe-utils:1.0.0")
implementation("com.starrocks:spark-dpp:1.0.0")
implementation("com.starrocks:fe-utils:${project.version}")
implementation("com.starrocks:spark-dpp:${project.version}")
implementation("com.starrocks:starclient:${project.ext["staros.version"]}")
implementation("com.starrocks:starmanager:${project.ext["staros.version"]}")
implementation("com.starrocks:starrocks-bdb-je:18.3.20")

View File

@@ -31,7 +31,6 @@ under the License.
</parent>
<artifactId>fe-core</artifactId>
<version>3.4.0</version>
<packaging>jar</packaging>
<properties>
@@ -100,19 +99,16 @@ under the License.
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-grammar</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-parser</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-spi</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>

View File

@@ -11,7 +11,6 @@
</parent>
<artifactId>fe-grammar</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>

View File

@@ -23,7 +23,6 @@ java {
}
group = "com.starrocks"
version = "1.0.0"
dependencies {
implementation("org.antlr:antlr4-runtime")

View File

@@ -11,7 +11,6 @@
</parent>
<artifactId>fe-parser</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>

View File

@@ -22,7 +22,6 @@ java {
}
group = "com.starrocks"
version = "1.0.0"
dependencies {
implementation("com.google.guava:guava")

View File

@@ -12,7 +12,6 @@
</parent>
<artifactId>fe-spi</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>

View File

@@ -23,7 +23,6 @@ java {
}
group = "com.starrocks"
version = "1.0.0"
// Note: There are no explicit dependencies in the original pom.xml for this module

View File

@@ -31,7 +31,6 @@ under the License.
</parent>
<artifactId>fe-testing</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>

View File

@@ -22,7 +22,6 @@ java {
}
group = "com.starrocks"
version = "1.0.0"
dependencies {
implementation("com.google.guava:guava")

View File

@@ -29,7 +29,6 @@
</parent>
<artifactId>fe-utils</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>

View File

@@ -17,7 +17,6 @@ plugins {
id("com.github.johnrengelman.shadow") version "8.1.1"
}
version = "1.0.0"
java {
sourceCompatibility = JavaVersion.VERSION_1_8

View File

@@ -11,7 +11,6 @@
<modelVersion>4.0.0</modelVersion>
<artifactId>hive-udf</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<dependencies>

View File

@@ -23,7 +23,6 @@ java {
}
group = "com.starrocks"
version = "1.0.0"
// Property equivalent to fe_ut_parallel in Maven
val feUtParallel = project.findProperty("fe_ut_parallel") ?: "1"

View File

@@ -31,7 +31,6 @@ under the License.
</parent>
<artifactId>spark-dpp</artifactId>
<version>1.0.0</version>
<packaging>jar</packaging>
<properties>

View File

@@ -178,28 +178,41 @@ under the License.
<dependencyManagement>
<dependencies>
<!-- internal modules unified -->
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-grammar</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-parser</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-spi</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-utils</artifactId>
<version>1.0.0</version>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>hive-udf</artifactId>
<version>1.0.0</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>fe-testing</artifactId>
<version>1.0.0</version>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>spark-dpp</artifactId>
<version>1.0.0</version>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.starrocks</groupId>
<artifactId>hive-udf</artifactId>
<version>${project.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-cli/commons-cli -->