[Enhancement] Add gradle build for FE (#60933)
Signed-off-by: Binglin Chang <decstery@gmail.com>
This commit is contained in:
parent
0fc010c5be
commit
b6e09e6bd0
|
|
@ -0,0 +1,348 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
A Python script to parse a set of Maven pom.xml files from a multi-module
|
||||
project. It extracts version properties, dependency management information,
|
||||
submodule dependencies, and any dependency exclusions. The result is then
|
||||
output in a structured JSON format.
|
||||
|
||||
It can also sync this information into corresponding build.gradle.kts files.
|
||||
|
||||
under dir {root}/fe, run:
|
||||
|
||||
Usage for Parsing:
|
||||
python sync_pom_to_gradle.py
|
||||
|
||||
Usage for Syncing to Gradle:
|
||||
python sync_pom_to_gradle.py --sync-gradle
|
||||
"""
|
||||
|
||||
import sys
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
# --- XML Parsing Logic (from previous steps) ---
|
||||
|
||||
# Maven POM XML namespace map; every ElementTree lookup below uses the 'm:' prefix.
POM_NAMESPACE = {'m': 'http://maven.apache.org/POM/4.0.0'}
|
||||
|
||||
def get_child_text(element, tag_name, namespace_map):
    """Return the stripped text of *element*'s child *tag_name*, or None.

    None is returned both when the child is absent and when it has no text.
    """
    child = element.find(tag_name, namespace_map)
    if child is None or not child.text:
        return None
    return child.text.strip()
|
||||
|
||||
def parse_properties(root, ns):
    """Extract the pom's <properties> as [{'name': ..., 'version': ...}, ...].

    Returns None when the section is missing or yields no usable entries.
    """
    props_node = root.find('m:properties', ns)
    if props_node is None:
        return None
    # ElementTree prefixes tags with '{namespace-uri}'; strip it to get the
    # plain Maven property name.
    uri_prefix = f'{{{ns["m"]}}}'
    collected = []
    for prop in props_node:
        name = prop.tag.replace(uri_prefix, '')
        text = prop.text
        if not name or not text:
            continue
        collected.append({'name': name, 'version': text.strip()})
    return collected or None
|
||||
|
||||
def _parse_exclusions(dependency_node, ns):
    """Collect a dependency's <exclusions> as 'group:artifact' strings.

    Returns None when there is no <exclusions> section or no complete entry.
    """
    container = dependency_node.find('m:exclusions', ns)
    if container is None:
        return None
    collected = []
    for excl in container.findall('m:exclusion', ns):
        gid = get_child_text(excl, 'm:groupId', ns)
        aid = get_child_text(excl, 'm:artifactId', ns)
        # Entries missing either coordinate are silently skipped.
        if gid and aid:
            collected.append(f"{gid}:{aid}")
    return collected or None
|
||||
|
||||
def _parse_dependency_list(dependency_nodes, ns):
    """Convert <dependency> nodes into a {'group:artifact': info} mapping.

    *info* carries the optional version/type/scope/classifier fields that are
    present, plus an 'exclusions' list when the pom declares one.  Returns
    None when nothing usable was parsed.
    """
    parsed = {}
    for node in dependency_nodes:
        gid = get_child_text(node, 'm:groupId', ns)
        aid = get_child_text(node, 'm:artifactId', ns)
        if not gid or not aid:
            # Malformed entry: skip rather than fail the whole parse.
            continue
        info = {}
        for field in ('version', 'type', 'scope', 'classifier'):
            text = get_child_text(node, f'm:{field}', ns)
            if text:
                info[field] = text
        exclusions = _parse_exclusions(node, ns)
        if exclusions:
            info['exclusions'] = exclusions
        parsed[f"{gid}:{aid}"] = info
    return parsed or None
|
||||
|
||||
def parse_dependency_management(root, ns):
    """Parse <dependencyManagement> entries; None when the section is absent."""
    nodes = root.findall('m:dependencyManagement/m:dependencies/m:dependency', ns)
    if not nodes:
        return None
    return _parse_dependency_list(nodes, ns)
|
||||
|
||||
def parse_dependencies(root, ns):
    """Parse top-level <dependencies> entries; None when the section is absent."""
    nodes = root.findall('m:dependencies/m:dependency', ns)
    if not nodes:
        return None
    return _parse_dependency_list(nodes, ns)
|
||||
|
||||
|
||||
# --- Gradle Syncing Logic ---
|
||||
|
||||
def _replace_content_between_markers(content, start_marker, end_marker, new_block):
|
||||
"""
|
||||
Replaces content within a file between start and end markers.
|
||||
The markers themselves are preserved.
|
||||
"""
|
||||
# Pattern to find the block, including the markers themselves.
|
||||
# It captures the start marker line and the end marker line to preserve them.
|
||||
pattern = re.compile(
|
||||
f'({re.escape(start_marker)}\\n).*?(\\n\\s*{re.escape(end_marker)})',
|
||||
re.DOTALL
|
||||
)
|
||||
# The replacement consists of the captured start marker, the new content,
|
||||
# and the captured end marker.
|
||||
replacement = f'\\1{new_block}\\2'
|
||||
|
||||
# Perform the substitution
|
||||
new_content, count = pattern.subn(replacement, content)
|
||||
|
||||
if count == 0:
|
||||
print(f"Warning: Markers '{start_marker}' and '{end_marker}' not found. No changes made to this block.", file=sys.stderr)
|
||||
return content # Return original content if markers not found
|
||||
|
||||
return new_content
|
||||
|
||||
def _generate_gradle_vars_string(vars_list, indent=" "):
|
||||
"""
|
||||
Generates the body of the `ext { ... }` block for Gradle.
|
||||
"""
|
||||
if not vars_list:
|
||||
return ""
|
||||
|
||||
# Filter out properties that are usually handled manually in Gradle
|
||||
vars_to_skip = {
|
||||
'starrocks.home', 'project.build.sourceEncoding', 'skip.plugin',
|
||||
'sonar.organization', 'sonar.host.url'
|
||||
}
|
||||
|
||||
lines = []
|
||||
for var in vars_list:
|
||||
name = var['name']
|
||||
version = var['version']
|
||||
if name not in vars_to_skip:
|
||||
# Escape backslashes and double quotes in version string
|
||||
escaped_version = version.replace('\\', '\\\\').replace('"', '\\"')
|
||||
lines.append(f'{indent}set("{name}", "{escaped_version}")')
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _resolve_gradle_version(version_str, vars_map):
|
||||
"""
|
||||
Translates a Maven version string (literal or variable) to a Gradle version string.
|
||||
e.g., "${jackson.version}" -> '"${project.ext["jackson.version"]}"'
|
||||
"""
|
||||
if not version_str:
|
||||
return '""' # Should not happen in constraints, but defensive
|
||||
|
||||
match = re.match(r"\$\{(.*)\}", version_str)
|
||||
if match:
|
||||
var_name = match.group(1)
|
||||
return f'${{project.ext["{var_name}"]}}'
|
||||
else:
|
||||
# It's a literal version
|
||||
return version_str
|
||||
|
||||
def _generate_gradle_deps_string(deps_dict, vars_map, is_constraints, dep_management, indent="            "):
    """
    Generates the body of the `dependencies` or `constraints` block for Gradle.

    deps_dict      -- {'group:artifact': info} mapping from _parse_dependency_list.
    vars_map       -- property name -> version; passed through to
                      _resolve_gradle_version (versions stay symbolic there).
    is_constraints -- True when emitting a `constraints` block: every entry is
                      declared as `implementation` and exclusions are omitted.
    dep_management -- parent pom dependencyManagement, used to fill in scope
                      and exclusions that a module-level entry omits.
    indent         -- leading whitespace for each generated line.
    """
    if not deps_dict:
        return ""

    lines = []
    for key, info in sorted(deps_dict.items()):
        # Skip BOMs (Bill of Materials), as they are handled with platform()
        if info.get('type') == 'pom':
            continue

        if key == 'com.starrocks:jprotobuf-starrocks' and not is_constraints:
            # special handling manually
            continue

        # Merge managed defaults under the module's own declaration; the
        # module's values win on conflict.
        merged_info = dep_management.get(key, {}).copy() if dep_management else {}
        merged_info.update(info)

        # Map Maven scope to Gradle configuration
        scope = merged_info.get('scope')
        if is_constraints or key == 'com.starrocks:jprotobuf-starrocks':
            config = "implementation"
        elif scope == 'test':
            config = 'testImplementation'
        elif scope == 'provided':
            config = 'compileOnly'
        elif scope == 'runtime':
            config = 'runtimeOnly'
        else:
            config = 'implementation'

        # Note: the version comes from the entry itself (not merged_info), so
        # constraint-managed versions are not repeated on module entries.
        version_str = info.get('version')
        version_part = _resolve_gradle_version(version_str, vars_map) if version_str else ""

        # Build the core dependency string: group:artifact:version
        dep_string = f"{key}"
        if version_part and version_part != '""':
            dep_string += f":{version_part}"

        # Add classifier if it exists
        # classifier = merged_info.get('classifier')
        # if classifier:
        #     if not version_part or version_part == '""':
        #         # Maven allows classifier without version, Gradle needs a placeholder
        #         dep_string += ":"  # Add empty version part
        #     dep_string += f":{classifier}"

        line = f'{indent}{config}("{dep_string}")'

        # Handle exclusions (constraint entries cannot carry exclude blocks)
        if 'exclusions' in merged_info and not is_constraints:
            line += " {\n"
            for exclusion in merged_info['exclusions']:
                group, module = exclusion.split(':', 1)
                line += f'{indent}    exclude(group = "{group}", module = "{module}")\n'
            line += f'{indent}}}'
        lines.append(line)

    return "\n".join(lines)
|
||||
|
||||
def sync_pom_to_gradle(pom_file, pom_data, dep_management=None):
    """
    Reads a build.gradle.kts file and replaces the content between markers
    with information parsed from the pom_data.

    pom_file       -- path of the parsed pom.xml; the gradle file is looked up
                      next to it as build.gradle.kts (silently skipped if absent).
    pom_data       -- dict with optional 'vars', 'dependencyManagement' and
                      'dependency' keys, as assembled by main().
    dep_management -- the root pom's dependencyManagement, used to enrich
                      submodule dependencies with scope/exclusions.

    The gradle file is rewritten only when the generated content differs.
    """
    gradle_file = os.path.join(os.path.dirname(pom_file) or '.', 'build.gradle.kts')
    if not os.path.exists(gradle_file):
        # This is not an error, as not all modules may have a gradle file
        return

    print(f"--- Syncing {pom_file} to {gradle_file} ---")

    try:
        with open(gradle_file, 'r', encoding='utf-8') as f:
            content = f.read()
    except IOError as e:
        print(f"Error reading gradle file {gradle_file}: {e}", file=sys.stderr)
        return

    # Keep the original text so we can detect whether anything changed.
    original_content = content
    vars_map = {v['name']: v['version'] for v in pom_data.get('vars', [])}

    # Sync variables
    if 'vars' in pom_data:
        # sort list by name, and only include vars ending in ".version"
        pom_vars = sorted([v for v in pom_data['vars'] if v['name'].endswith('.version')], key=lambda x: x['name'])
        if len(pom_vars) > 0:
            new_vars_block = _generate_gradle_vars_string(pom_vars)
            content = _replace_content_between_markers(
                content,
                '// var sync start',
                '// var sync end',
                new_vars_block
            )

    # Sync dependencyManagement to the `constraints` block
    # NOTE(review): this branch and the 'dependency' branch below target the
    # same '// dependency sync start/end' marker pair; if a pom ever carries
    # both sections, the second substitution overwrites the first.  Today the
    # root pom has only dependencyManagement and submodules only dependencies
    # -- confirm that assumption holds before reusing this for other poms.
    if 'dependencyManagement' in pom_data:
        new_deps_block = _generate_gradle_deps_string(
            pom_data['dependencyManagement'], vars_map, is_constraints=True, dep_management=dep_management
        )
        content = _replace_content_between_markers(
            content,
            '// dependency sync start',
            '// dependency sync end',
            new_deps_block
        )

    # Sync dependencies (for submodules)
    if 'dependency' in pom_data:
        new_deps_block = _generate_gradle_deps_string(
            pom_data['dependency'], vars_map, is_constraints=False, dep_management=dep_management, indent="    "
        )
        content = _replace_content_between_markers(
            content,
            '// dependency sync start',
            '// dependency sync end',
            new_deps_block
        )

    # Write back to the file only if changes were made
    if content != original_content:
        try:
            with open(gradle_file, 'w', encoding='utf-8') as f:
                f.write(content)
            print(f"Successfully updated {gradle_file}")
        except IOError as e:
            print(f"Error writing to gradle file {gradle_file}: {e}", file=sys.stderr)
    else:
        print(f"No changes needed for {gradle_file}")
|
||||
|
||||
|
||||
def main():
    """
    Main entry point.  Parses CLI arguments, extracts data from the known
    pom.xml files, and either prints the result as JSON (default) or syncs
    it into the neighbouring build.gradle.kts files (--sync-gradle).
    """
    args = sys.argv[1:]

    sync_mode = '--sync-gradle' in args
    if sync_mode:
        args.remove('--sync-gradle')

    # The root pom's dependencyManagement is remembered so submodule poms
    # (parsed later in the list) can inherit scope/exclusion information.
    parent_dep_management = None
    all_poms_data = {}
    for pom_file in ['pom.xml', 'fe-core/pom.xml']:
        try:
            tree = ET.parse(pom_file)
            root = tree.getroot()
        except FileNotFoundError:
            print(f"Error: File not found: {pom_file}", file=sys.stderr)
            continue
        except ET.ParseError as e:
            print(f"Error: Could not parse XML file '{pom_file}': {e}", file=sys.stderr)
            continue

        pom_data = {}
        properties = parse_properties(root, POM_NAMESPACE)
        if properties:
            pom_data['vars'] = properties

        dep_management = parse_dependency_management(root, POM_NAMESPACE)
        if dep_management:
            pom_data['dependencyManagement'] = dep_management
            parent_dep_management = dep_management

        dependencies = parse_dependencies(root, POM_NAMESPACE)
        if dependencies:
            pom_data['dependency'] = dependencies

        if pom_data:
            all_poms_data[pom_file] = pom_data
            if sync_mode:
                # Fix: the original had an if/else on pom_file whose branches
                # were identical -- every pom syncs the same way, with the
                # root's dependencyManagement threaded through.
                sync_pom_to_gradle(pom_file, pom_data, parent_dep_management)

    if not sync_mode:
        print(json.dumps(all_poms_data, indent=4))
|
||||
|
||||
# Script entry point: parse the poms and print JSON, or sync to Gradle
# when invoked with --sync-gradle.
if __name__ == "__main__":
    main()
|
||||
|
|
@ -0,0 +1,249 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Root build: shared plugins, coordinates and repositories for every module.
plugins {
    java
    `maven-publish`
}

allprojects {
    // Coordinates mirrored from the Maven root pom.
    group = "com.starrocks"
    version = "3.4.0"

    repositories {
        mavenCentral()
        // Cloudera repos provide the Hadoop-ecosystem artifacts; the ISCAS
        // mirror serves Kunpeng builds.
        maven { url = uri("https://repository.cloudera.com/repository/public/") }
        maven { url = uri("https://repository.cloudera.com/repository/cloudera-repos/") }
        maven { url = uri("https://mirror.iscas.ac.cn/kunpeng/maven/") }
    }
}
|
||||
|
||||
subprojects {
    apply {
        plugin("java")
        plugin("maven-publish")
    }

    // Common properties from pom.xml.
    // The region between the "// var sync start/end" markers is generated by
    // sync_pom_to_gradle.py from the root pom's <properties>; edit the pom
    // and re-run the script instead of editing these lines by hand.
    ext {
        set("starrocks.home", "${rootDir}/../")
        // var sync start
        set("arrow.version", "18.0.0")
        set("async-profiler.version", "4.0")
        set("avro.version", "1.12.0")
        set("aws-v2-sdk.version", "2.29.52")
        set("azure.version", "1.2.34")
        set("byteman.version", "4.0.24")
        set("commons-beanutils.version", "1.11.0")
        set("delta-kernel.version", "4.0.0rc1")
        set("dlf-metastore-client.version", "0.2.14")
        set("dnsjava.version", "3.6.3")
        set("fastutil.version", "8.5.15")
        set("gcs.connector.version", "hadoop3-2.2.26")
        set("grpc.version", "1.63.0")
        set("hadoop.version", "3.4.1")
        set("hbase.version", "2.6.2")
        set("hikaricp.version", "3.4.5")
        set("hive-apache.version", "3.1.2-22")
        set("hudi.version", "1.0.2")
        set("iceberg.version", "1.9.0")
        set("io.netty.version", "4.1.118.Final")
        set("jackson.version", "2.15.2")
        set("jetty.version", "9.4.57.v20241219")
        set("jprotobuf-starrocks.version", "1.0.0")
        set("kafka-clients.version", "3.4.0")
        set("kudu.version", "1.17.1")
        set("log4j.version", "2.19.0")
        set("nimbusds.version", "9.37.2")
        set("odps.version", "0.48.7-public")
        set("paimon.version", "1.0.1")
        set("parquet.version", "1.15.2")
        set("protobuf-java.version", "3.25.5")
        set("puppycrawl.version", "10.21.1")
        set("spark.version", "3.5.5")
        set("staros.version", "3.5-rc2")
        set("tomcat.version", "8.5.70")
        // var sync end
    }

    dependencies {
        // BOMs imported as platforms (Maven <type>pom</type> entries).
        implementation(platform("com.azure:azure-sdk-bom:${project.ext["azure.version"]}"))
        implementation(platform("io.opentelemetry:opentelemetry-bom:1.14.0"))
        implementation(platform("software.amazon.awssdk:bom:${project.ext["aws-v2-sdk.version"]}"))

        // Version constraints mirroring the root pom's <dependencyManagement>.
        // The marker region below is generated by sync_pom_to_gradle.py.
        constraints {
            // dependency sync start
            implementation("com.aliyun.datalake:metastore-client-hive3:${project.ext["dlf-metastore-client.version"]}")
            implementation("com.aliyun.odps:odps-sdk-core:${project.ext["odps.version"]}")
            implementation("com.aliyun.odps:odps-sdk-table-api:${project.ext["odps.version"]}")
            implementation("com.aliyun:datalake20200710:2.0.12")
            implementation("com.baidu:jprotobuf-precompile-plugin:2.2.12")
            implementation("com.baidu:jprotobuf-rpc-common:1.9")
            implementation("com.baidu:jprotobuf-rpc-core:4.2.1")
            implementation("com.clickhouse:clickhouse-jdbc:0.4.6")
            implementation("com.esotericsoftware:kryo-shaded:4.0.2")
            implementation("com.fasterxml.jackson.core:jackson-annotations:${project.ext["jackson.version"]}")
            implementation("com.fasterxml.jackson.core:jackson-core:${project.ext["jackson.version"]}")
            implementation("com.fasterxml.jackson.core:jackson-databind:${project.ext["jackson.version"]}")
            implementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.ext["jackson.version"]}")
            implementation("com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${project.ext["jackson.version"]}")
            implementation("com.fasterxml.uuid:java-uuid-generator:5.1.0")
            implementation("com.github.ben-manes.caffeine:caffeine:2.9.3")
            implementation("com.github.hazendaz.jmockit:jmockit:1.49.4")
            implementation("com.github.oshi:oshi-core:6.2.1")
            implementation("com.github.seancfoley:ipaddress:5.4.2")
            implementation("com.google.cloud.bigdataoss:gcs-connector:${project.ext["gcs.connector.version"]}")
            implementation("com.google.code.gson:gson:2.8.9")
            implementation("com.google.guava:guava:32.0.1-jre")
            implementation("com.google.protobuf:protobuf-java:${project.ext["protobuf-java.version"]}")
            implementation("com.google.protobuf:protobuf-java-util:${project.ext["protobuf-java.version"]}")
            implementation("com.microsoft.sqlserver:mssql-jdbc:12.4.2.jre11")
            implementation("com.mockrunner:mockrunner-jdbc:1.0.1")
            implementation("com.nimbusds:nimbus-jose-jwt:${project.ext["nimbusds.version"]}")
            implementation("com.opencsv:opencsv:5.7.1")
            implementation("com.oracle.database.jdbc:ojdbc10:19.18.0.0")
            implementation("com.oracle.database.nls:orai18n:19.18.0.0")
            implementation("com.qcloud.cos:hadoop-cos:3.3.0-8.3.2")
            implementation("com.qcloud:chdfs_hadoop_plugin_network:3.2")
            implementation("com.squareup.okhttp3:okhttp:4.10.0")
            implementation("com.squareup.okio:okio:3.4.0")
            implementation("com.starrocks:fe-common:1.0.0")
            implementation("com.starrocks:hive-udf:1.0.0")
            implementation("com.starrocks:jprotobuf-starrocks:${project.ext["jprotobuf-starrocks.version"]}")
            implementation("com.starrocks:plugin-common:1.0.0")
            implementation("com.starrocks:spark-dpp:1.0.0")
            implementation("com.starrocks:starclient:${project.ext["staros.version"]}")
            implementation("com.starrocks:starmanager:${project.ext["staros.version"]}")
            implementation("com.starrocks:starrocks-bdb-je:18.3.20")
            implementation("com.sun.activation:javax.activation:1.2.0")
            implementation("commons-beanutils:commons-beanutils:${project.ext["commons-beanutils.version"]}")
            implementation("commons-cli:commons-cli:1.4")
            implementation("commons-codec:commons-codec:1.13")
            implementation("commons-collections:commons-collections:3.2.2")
            implementation("commons-io:commons-io:2.16.1")
            implementation("commons-lang:commons-lang:2.4")
            implementation("commons-validator:commons-validator:1.7")
            implementation("de.jflex:jflex:1.4.3")
            implementation("dnsjava:dnsjava:${project.ext["dnsjava.version"]}")
            implementation("io.airlift:aircompressor:0.27")
            implementation("io.airlift:concurrent:202")
            implementation("io.airlift:security:202")
            implementation("io.delta:delta-kernel-api:${project.ext["delta-kernel.version"]}")
            implementation("io.delta:delta-kernel-defaults:${project.ext["delta-kernel.version"]}")
            implementation("io.grpc:grpc-api:${project.ext["grpc.version"]}")
            implementation("io.grpc:grpc-core:${project.ext["grpc.version"]}")
            implementation("io.grpc:grpc-netty-shaded:${project.ext["grpc.version"]}")
            implementation("io.grpc:grpc-protobuf:${project.ext["grpc.version"]}")
            implementation("io.grpc:grpc-stub:${project.ext["grpc.version"]}")
            implementation("io.netty:netty-all:${project.ext["io.netty.version"]}")
            implementation("io.netty:netty-handler:${project.ext["io.netty.version"]}")
            implementation("io.trino.hive:hive-apache:${project.ext["hive-apache.version"]}")
            implementation("it.unimi.dsi:fastutil:${project.ext["fastutil.version"]}")
            implementation("javax.annotation:javax.annotation-api:1.3.2")
            implementation("javax.validation:validation-api:1.1.0.Final")
            implementation("javax.xml.ws:jaxws-api:2.3.0")
            implementation("net.sourceforge.czt.dev:java-cup:0.11-a-czt02-cdh")
            implementation("org.antlr:antlr4:4.9.2")
            implementation("org.apache.arrow:arrow-jdbc:${project.ext["arrow.version"]}")
            implementation("org.apache.arrow:arrow-memory-netty:${project.ext["arrow.version"]}")
            implementation("org.apache.arrow:arrow-vector:${project.ext["arrow.version"]}")
            implementation("org.apache.arrow:flight-core:${project.ext["arrow.version"]}")
            implementation("org.apache.arrow:flight-sql:${project.ext["arrow.version"]}")
            implementation("org.apache.arrow:flight-sql-jdbc-driver:${project.ext["arrow.version"]}")
            implementation("org.apache.avro:avro:${project.ext["avro.version"]}")
            implementation("org.apache.commons:commons-dbcp2:2.9.0")
            implementation("org.apache.commons:commons-lang3:3.9")
            implementation("org.apache.commons:commons-pool2:2.3")
            implementation("org.apache.groovy:groovy-groovysh:4.0.9")
            implementation("org.apache.hadoop:hadoop-aliyun:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-aws:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-azure:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-azure-datalake:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-client:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-client-api:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-client-runtime:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-common:${project.ext["hadoop.version"]}")
            implementation("org.apache.hadoop:hadoop-hdfs:${project.ext["hadoop.version"]}")
            implementation("org.apache.hbase:hbase-client:${project.ext["hbase.version"]}")
            implementation("org.apache.hbase:hbase-server:${project.ext["hbase.version"]}")
            implementation("org.apache.httpcomponents.client5:httpclient5:5.4.3")
            implementation("org.apache.hudi:hudi-common:${project.ext["hudi.version"]}")
            implementation("org.apache.hudi:hudi-hadoop-mr:${project.ext["hudi.version"]}")
            implementation("org.apache.hudi:hudi-io:${project.ext["hudi.version"]}")
            implementation("org.apache.iceberg:iceberg-api:${project.ext["iceberg.version"]}")
            implementation("org.apache.iceberg:iceberg-aws:${project.ext["iceberg.version"]}")
            implementation("org.apache.iceberg:iceberg-bundled-guava:${project.ext["iceberg.version"]}")
            implementation("org.apache.iceberg:iceberg-common:${project.ext["iceberg.version"]}")
            implementation("org.apache.iceberg:iceberg-core:${project.ext["iceberg.version"]}")
            implementation("org.apache.iceberg:iceberg-hive-metastore:${project.ext["iceberg.version"]}")
            implementation("org.apache.ivy:ivy:2.5.2")
            implementation("org.apache.kafka:kafka-clients:${project.ext["kafka-clients.version"]}")
            implementation("org.apache.kudu:kudu-client:${project.ext["kudu.version"]}")
            implementation("org.apache.logging.log4j:log4j-1.2-api:${project.ext["log4j.version"]}")
            implementation("org.apache.logging.log4j:log4j-api:${project.ext["log4j.version"]}")
            implementation("org.apache.logging.log4j:log4j-core:${project.ext["log4j.version"]}")
            implementation("org.apache.logging.log4j:log4j-layout-template-json:${project.ext["log4j.version"]}")
            implementation("org.apache.logging.log4j:log4j-slf4j-impl:${project.ext["log4j.version"]}")
            implementation("org.apache.paimon:paimon-bundle:${project.ext["paimon.version"]}")
            implementation("org.apache.paimon:paimon-oss:${project.ext["paimon.version"]}")
            implementation("org.apache.paimon:paimon-s3:${project.ext["paimon.version"]}")
            implementation("org.apache.parquet:parquet-avro:${project.ext["parquet.version"]}")
            implementation("org.apache.parquet:parquet-column:${project.ext["parquet.version"]}")
            implementation("org.apache.parquet:parquet-common:${project.ext["parquet.version"]}")
            implementation("org.apache.parquet:parquet-hadoop:${project.ext["parquet.version"]}")
            implementation("org.apache.ranger:ranger-plugins-common:2.5.0")
            implementation("org.apache.spark:spark-catalyst_2.12:${project.ext["spark.version"]}")
            implementation("org.apache.spark:spark-core_2.12:${project.ext["spark.version"]}")
            implementation("org.apache.spark:spark-launcher_2.12:${project.ext["spark.version"]}")
            implementation("org.apache.spark:spark-sql_2.12:${project.ext["spark.version"]}")
            implementation("org.apache.thrift:libthrift:0.20.0")
            implementation("org.apache.velocity:velocity-engine-core:2.4.1")
            implementation("org.eclipse.jetty:jetty-client:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-io:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-security:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-server:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-servlet:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-util:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-util-ajax:${project.ext["jetty.version"]}")
            implementation("org.eclipse.jetty:jetty-webapp:${project.ext["jetty.version"]}")
            implementation("org.jboss.byteman:byteman:${project.ext["byteman.version"]}")
            implementation("org.jboss.xnio:xnio-nio:3.8.16.Final")
            implementation("org.jdom:jdom2:2.0.6.1")
            implementation("org.json:json:20231013")
            implementation("org.junit.jupiter:junit-jupiter:5.8.2")
            implementation("org.mariadb.jdbc:mariadb-java-client:3.3.2")
            implementation("org.owasp.encoder:encoder:1.3.1")
            implementation("org.postgresql:postgresql:42.4.4")
            implementation("org.roaringbitmap:RoaringBitmap:0.8.13")
            implementation("org.scala-lang:scala-library:2.12.10")
            implementation("org.slf4j:slf4j-api:1.7.30")
            implementation("org.xerial.snappy:snappy-java:1.1.10.5")
            implementation("software.amazon.awssdk:bundle:${project.ext["aws-v2-sdk.version"]}")
            implementation("tools.profiler:async-profiler:${project.ext["async-profiler.version"]}")
            // dependency sync end
        }
    }

    tasks.withType<JavaCompile> {
        options.encoding = "UTF-8"
    }

    publishing {
        publications {
            create<MavenPublication>("maven") {
                from(components["java"])
            }
        }
    }
}
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Module build: plain Java 8 library with checkstyle; mirrors the module's
// original Maven pom configuration.
plugins {
    java
    checkstyle
}

java {
    sourceCompatibility = JavaVersion.VERSION_1_8
    targetCompatibility = JavaVersion.VERSION_1_8
}

group = "com.starrocks"
version = "1.0.0"

// Note: There are no explicit dependencies in the original pom.xml for this module

tasks.withType<Test> {
    // Configuration from Maven: failIfNoSpecifiedTests=false
    ignoreFailures = true
}

tasks.withType<JavaCompile> {
    options.encoding = "UTF-8"
}

checkstyle {
    // Prefer the version synced from the pom ("puppycrawl.version"); fall
    // back to a pinned default when the property is not defined.
    toolVersion = project.findProperty("puppycrawl.version") as String? ?: "10.21.1"
    configFile = rootProject.file("checkstyle.xml")
}

tasks.withType<Checkstyle> {
    // Generated jmockit sources are not held to the project style.
    exclude("**/jmockit/**/*")
    isShowViolations = true
    ignoreFailures = false
}
|
||||
|
|
@ -0,0 +1,501 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
import com.baidu.jprotobuf.plugin.PrecompileTask
|
||||
import org.gradle.api.tasks.testing.logging.TestExceptionFormat
|
||||
import org.gradle.api.tasks.testing.logging.TestLogEvent
|
||||
|
||||
plugins {
|
||||
java
|
||||
antlr
|
||||
id("com.baidu.jprotobuf") version "1.2.1"
|
||||
}
|
||||
|
||||
java {
|
||||
sourceCompatibility = JavaVersion.VERSION_17
|
||||
targetCompatibility = JavaVersion.VERSION_17
|
||||
sourceSets {
|
||||
main {
|
||||
java {
|
||||
srcDir("src/main/java")
|
||||
srcDir("build/generated-sources/antlr4")
|
||||
srcDir("build/generated-sources/proto")
|
||||
srcDir("build/generated-sources/thrift")
|
||||
srcDir("build/generated-sources/genscript")
|
||||
}
|
||||
}
|
||||
test {
|
||||
java {
|
||||
srcDir("src/test/java")
|
||||
}
|
||||
resources {
|
||||
srcDir("src/test/resources")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//configurations.configureEach {
|
||||
// if (name != "thriftGenClasspath") {
|
||||
// // thrfit gen plugin uses antlr 4.13, which conflict with current antlr version
|
||||
// resolutionStrategy.force("org.antlr:antlr4-runtime:4.9.3")
|
||||
// }
|
||||
//}
|
||||
|
||||
dependencies {
|
||||
antlr("org.antlr:antlr4:4.9.3")
|
||||
|
||||
// Internal project dependencies
|
||||
implementation(project(":fe-common"))
|
||||
implementation(project(":plugin-common"))
|
||||
implementation(project(":hive-udf"))
|
||||
implementation(project(":spark-dpp"))
|
||||
|
||||
// dependency sync start
|
||||
implementation("com.aliyun.datalake:metastore-client-hive3") {
|
||||
exclude(group = "com.aliyun", module = "tea")
|
||||
exclude(group = "com.aliyun", module = "tea-openapi")
|
||||
exclude(group = "com.aliyun", module = "tea-util")
|
||||
exclude(group = "com.aliyun", module = "datalake20200710")
|
||||
}
|
||||
implementation("com.aliyun.odps:odps-sdk-core") {
|
||||
exclude(group = "org.codehaus.jackson", module = "jackson-mapper-asl")
|
||||
exclude(group = "org.ini4j", module = "ini4j")
|
||||
}
|
||||
implementation("com.aliyun.odps:odps-sdk-table-api")
|
||||
implementation("com.azure:azure-identity")
|
||||
implementation("com.azure:azure-storage-blob")
|
||||
compileOnly("com.baidu:jprotobuf-precompile-plugin") {
|
||||
exclude(group = "org.apache.maven", module = "maven-core")
|
||||
exclude(group = "org.codehaus.plexus", module = "plexus-utils")
|
||||
exclude(group = "junit", module = "junit")
|
||||
}
|
||||
implementation("com.baidu:jprotobuf-rpc-common")
|
||||
implementation("com.baidu:jprotobuf-rpc-core") {
|
||||
exclude(group = "com.baidu", module = "jprotobuf")
|
||||
exclude(group = "junit", module = "junit")
|
||||
}
|
||||
implementation("com.clickhouse:clickhouse-jdbc")
|
||||
implementation("com.fasterxml.jackson.core:jackson-annotations")
|
||||
implementation("com.fasterxml.jackson.core:jackson-core")
|
||||
implementation("com.fasterxml.jackson.core:jackson-databind")
|
||||
implementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml")
|
||||
implementation("com.fasterxml.jackson.module:jackson-module-jaxb-annotations")
|
||||
implementation("com.fasterxml.uuid:java-uuid-generator")
|
||||
implementation("com.github.ben-manes.caffeine:caffeine")
|
||||
testImplementation("com.github.hazendaz.jmockit:jmockit")
|
||||
implementation("com.github.oshi:oshi-core")
|
||||
implementation("com.github.seancfoley:ipaddress")
|
||||
implementation("com.google.cloud.bigdataoss:gcs-connector")
|
||||
implementation("com.google.code.gson:gson")
|
||||
implementation("com.google.guava:guava")
|
||||
implementation("com.google.protobuf:protobuf-java")
|
||||
implementation("com.google.protobuf:protobuf-java-util")
|
||||
implementation("com.microsoft.sqlserver:mssql-jdbc")
|
||||
testImplementation("com.mockrunner:mockrunner-jdbc") {
|
||||
exclude(group = "xerces", module = "xercesImpl")
|
||||
exclude(group = "junit", module = "junit")
|
||||
}
|
||||
implementation("com.opencsv:opencsv")
|
||||
implementation("com.oracle.database.jdbc:ojdbc10")
|
||||
implementation("com.oracle.database.nls:orai18n")
|
||||
implementation("com.qcloud.cos:hadoop-cos")
|
||||
implementation("com.qcloud:chdfs_hadoop_plugin_network")
|
||||
implementation("com.squareup.okhttp3:okhttp")
|
||||
implementation("com.squareup.okio:okio")
|
||||
implementation("com.starrocks:spark-dpp")
|
||||
implementation("com.starrocks:starclient")
|
||||
implementation("com.starrocks:starmanager")
|
||||
implementation("com.starrocks:starrocks-bdb-je") {
|
||||
exclude(group = "org.checkerframework", module = "checker-qual")
|
||||
}
|
||||
implementation("com.sun.activation:javax.activation")
|
||||
implementation("com.zaxxer:HikariCP:${project.ext["hikaricp.version"]}")
|
||||
implementation("commons-cli:commons-cli")
|
||||
implementation("commons-codec:commons-codec")
|
||||
implementation("commons-io:commons-io")
|
||||
implementation("commons-lang:commons-lang")
|
||||
implementation("commons-validator:commons-validator") {
|
||||
exclude(group = "commons-collections", module = "commons-collections")
|
||||
}
|
||||
implementation("de.jflex:jflex")
|
||||
implementation("io.airlift:concurrent")
|
||||
implementation("io.airlift:security")
|
||||
implementation("io.delta:delta-kernel-api")
|
||||
implementation("io.delta:delta-kernel-defaults") {
|
||||
exclude(group = "org.apache.hadoop", module = "hadoop-client-api")
|
||||
exclude(group = "org.apache.hadoop", module = "hadoop-client-runtime")
|
||||
}
|
||||
implementation("io.grpc:grpc-api")
|
||||
implementation("io.grpc:grpc-core")
|
||||
implementation("io.grpc:grpc-netty-shaded")
|
||||
implementation("io.grpc:grpc-protobuf")
|
||||
implementation("io.grpc:grpc-stub")
|
||||
implementation("io.netty:netty-all")
|
||||
implementation("io.opentelemetry:opentelemetry-api")
|
||||
implementation("io.opentelemetry:opentelemetry-exporter-jaeger")
|
||||
implementation("io.opentelemetry:opentelemetry-exporter-otlp")
|
||||
implementation("io.opentelemetry:opentelemetry-sdk")
|
||||
implementation("io.trino.hive:hive-apache") {
|
||||
exclude(group = "org.apache.parquet", module = "*")
|
||||
exclude(group = "org.apache.avro", module = "*")
|
||||
}
|
||||
implementation("io.trino:trino-parser:385")
|
||||
implementation("it.unimi.dsi:fastutil")
|
||||
implementation("javax.annotation:javax.annotation-api")
|
||||
implementation("javax.validation:validation-api")
|
||||
implementation("net.openhft:zero-allocation-hashing:0.16")
|
||||
implementation("org.antlr:antlr4") {
|
||||
exclude(group = "commons-lang", module = "commons-lang")
|
||||
}
|
||||
implementation("org.apache.arrow:arrow-jdbc")
|
||||
implementation("org.apache.arrow:arrow-memory-netty")
|
||||
implementation("org.apache.arrow:arrow-vector")
|
||||
implementation("org.apache.arrow:flight-core")
|
||||
implementation("org.apache.arrow:flight-sql")
|
||||
testImplementation("org.apache.arrow:flight-sql-jdbc-driver")
|
||||
testImplementation("org.apache.commons:commons-dbcp2")
|
||||
implementation("org.apache.commons:commons-lang3")
|
||||
implementation("org.apache.commons:commons-pool2")
|
||||
implementation("org.apache.groovy:groovy-groovysh")
|
||||
implementation("org.apache.hadoop:hadoop-aliyun") {
|
||||
exclude(group = "org.jdom", module = "jdom2")
|
||||
exclude(group = "org.ini4j", module = "ini4j")
|
||||
}
|
||||
implementation("org.apache.hadoop:hadoop-aws") {
|
||||
exclude(group = "software.amazon.awssdk", module = "bundle")
|
||||
exclude(group = "org.apache.hadoop", module = "hadoop-common")
|
||||
}
|
||||
implementation("org.apache.hadoop:hadoop-azure")
|
||||
implementation("org.apache.hadoop:hadoop-azure-datalake")
|
||||
implementation("org.apache.hadoop:hadoop-client") {
|
||||
exclude(group = "org.slf4j", module = "slf4j-reload4j")
|
||||
exclude(group = "ch.qos.reload4j", module = "reload4j")
|
||||
}
|
||||
implementation("org.apache.hadoop:hadoop-client-api")
|
||||
implementation("org.apache.hadoop:hadoop-client-runtime") {
|
||||
exclude(group = "dnsjava", module = "dnsjava")
|
||||
exclude(group = "org.apache.avro", module = "avro")
|
||||
}
|
||||
implementation("org.apache.hadoop:hadoop-common") {
|
||||
exclude(group = "org.apache.zookeeper", module = "zookeeper")
|
||||
exclude(group = "org.slf4j", module = "slf4j-reload4j")
|
||||
exclude(group = "ch.qos.reload4j", module = "reload4j")
|
||||
exclude(group = "javax.ws.rs", module = "jsr311-api")
|
||||
}
|
||||
implementation("org.apache.hadoop:hadoop-hdfs")
|
||||
implementation("org.apache.httpcomponents.client5:httpclient5")
|
||||
implementation("org.apache.hudi:hudi-common") {
|
||||
exclude(group = "io.netty", module = "*")
|
||||
exclude(group = "org.glassfish", module = "javax.el")
|
||||
exclude(group = "org.apache.zookeeper", module = "zookeeper")
|
||||
}
|
||||
implementation("org.apache.hudi:hudi-hadoop-mr") {
|
||||
exclude(group = "org.glassfish", module = "javax.el")
|
||||
}
|
||||
implementation("org.apache.hudi:hudi-io")
|
||||
implementation("org.apache.iceberg:iceberg-api") {
|
||||
exclude(group = "org.apache.parquet", module = "parquet-format-structures")
|
||||
}
|
||||
implementation("org.apache.iceberg:iceberg-aws")
|
||||
implementation("org.apache.iceberg:iceberg-bundled-guava")
|
||||
implementation("org.apache.iceberg:iceberg-common")
|
||||
implementation("org.apache.iceberg:iceberg-core")
|
||||
implementation("org.apache.iceberg:iceberg-hive-metastore")
|
||||
implementation("org.apache.ivy:ivy")
|
||||
implementation("org.apache.kudu:kudu-client") {
|
||||
exclude(group = "io.netty", module = "netty-handler")
|
||||
}
|
||||
implementation("org.apache.logging.log4j:log4j-1.2-api")
|
||||
implementation("org.apache.logging.log4j:log4j-api")
|
||||
implementation("org.apache.logging.log4j:log4j-core")
|
||||
implementation("org.apache.logging.log4j:log4j-layout-template-json")
|
||||
implementation("org.apache.logging.log4j:log4j-slf4j-impl")
|
||||
implementation("org.apache.paimon:paimon-bundle")
|
||||
implementation("org.apache.paimon:paimon-oss")
|
||||
implementation("org.apache.paimon:paimon-s3")
|
||||
implementation("org.apache.parquet:parquet-avro")
|
||||
implementation("org.apache.parquet:parquet-column")
|
||||
implementation("org.apache.parquet:parquet-common")
|
||||
implementation("org.apache.parquet:parquet-hadoop")
|
||||
implementation("org.apache.ranger:ranger-plugins-common") {
|
||||
exclude(group = "org.elasticsearch", module = "*")
|
||||
exclude(group = "org.elasticsearch.client", module = "*")
|
||||
exclude(group = "com.nimbusds", module = "nimbus-jose-jwt")
|
||||
exclude(group = "com.sun.jersey", module = "jersey-bundle")
|
||||
}
|
||||
compileOnly("org.apache.spark:spark-catalyst_2.12")
|
||||
implementation("org.apache.spark:spark-core_2.12") {
|
||||
exclude(group = "org.slf4j", module = "slf4j-log4j12")
|
||||
exclude(group = "org.apache.zookeeper", module = "zookeeper")
|
||||
exclude(group = "org.apache.logging.log4j", module = "log4j-slf4j2-impl")
|
||||
exclude(group = "org.apache.hadoop", module = "hadoop-client")
|
||||
exclude(group = "org.apache.hadoop", module = "hadoop-client-api")
|
||||
exclude(group = "org.apache.ivy", module = "ivy")
|
||||
exclude(group = "log4j", module = "log4j")
|
||||
exclude(group = "com.clearspring.analytics", module = "stream")
|
||||
exclude(group = "org.apache.hadoop", module = "hadoop-client-runtime")
|
||||
exclude(group = "org.apache.commons", module = "commons-compress")
|
||||
exclude(group = "com.google.protobuf", module = "protobuf-java")
|
||||
exclude(group = "org.eclipse.jetty", module = "jetty-server")
|
||||
exclude(group = "org.eclipse.jetty", module = "jetty-util")
|
||||
exclude(group = "org.eclipse.jetty", module = "jetty-io")
|
||||
exclude(group = "org.eclipse.jetty", module = "jetty-servlet")
|
||||
exclude(group = "org.eclipse.jetty", module = "jetty-client")
|
||||
exclude(group = "org.eclipse.jetty", module = "jetty-security")
|
||||
}
|
||||
implementation("org.apache.spark:spark-launcher_2.12")
|
||||
compileOnly("org.apache.spark:spark-sql_2.12")
|
||||
implementation("org.apache.thrift:libthrift") {
|
||||
exclude(group = "org.apache.tomcat.embed", module = "tomcat-embed-core")
|
||||
exclude(group = "org.apache.tomcat", module = "tomcat-annotations-api")
|
||||
}
|
||||
implementation("org.apache.velocity:velocity-engine-core")
|
||||
testImplementation("org.assertj:assertj-core:3.24.2")
|
||||
testImplementation("org.awaitility:awaitility:4.2.0")
|
||||
implementation("org.jboss.byteman:byteman")
|
||||
implementation("org.jboss.xnio:xnio-nio")
|
||||
implementation("org.jdom:jdom2")
|
||||
implementation("org.json:json")
|
||||
testImplementation("org.junit.jupiter:junit-jupiter")
|
||||
implementation("org.mariadb.jdbc:mariadb-java-client")
|
||||
testImplementation("org.mockito:mockito-inline:4.11.0")
|
||||
testImplementation("org.openjdk.jmh:jmh-core:1.37")
|
||||
testImplementation("org.openjdk.jmh:jmh-generator-annprocess:1.37")
|
||||
implementation("org.owasp.encoder:encoder")
|
||||
implementation("org.postgresql:postgresql")
|
||||
implementation("org.quartz-scheduler:quartz:2.5.0")
|
||||
implementation("org.roaringbitmap:RoaringBitmap") {
|
||||
exclude(group = "org.apache.zookeeper", module = "zookeeper")
|
||||
}
|
||||
implementation("org.slf4j:slf4j-api")
|
||||
implementation("org.threeten:threeten-extra:1.7.2")
|
||||
implementation("org.xerial.snappy:snappy-java")
|
||||
implementation("software.amazon.awssdk:bundle")
|
||||
implementation("tools.profiler:async-profiler")
|
||||
// dependency sync end
|
||||
|
||||
// extra dependencies pom.xml does not have
|
||||
implementation("com.starrocks:jprotobuf-starrocks:${project.ext["jprotobuf-starrocks.version"]}")
|
||||
implementation("org.apache.groovy:groovy:4.0.9")
|
||||
testImplementation("org.apache.spark:spark-sql_2.12")
|
||||
implementation("org.antlr:antlr4-runtime:4.9.3")
|
||||
implementation("software.amazon.awssdk:s3-transfer-manager")
|
||||
implementation("net.openhft:zero-allocation-hashing:0.16")
|
||||
}
|
||||
|
||||
// Configure ANTLR plugin
|
||||
tasks.generateGrammarSource {
|
||||
maxHeapSize = "512m"
|
||||
// Add the -lib argument to tell ANTLR where to find imported grammars
|
||||
arguments = arguments + listOf(
|
||||
"-visitor",
|
||||
"-package", "com.starrocks.sql.parser",
|
||||
)
|
||||
outputDirectory = layout.buildDirectory.get().dir("generated-sources/antlr4/com/starrocks/sql/parser").asFile
|
||||
}
|
||||
|
||||
// Custom task for Protocol Buffer generation
|
||||
tasks.register<Task>("generateProtoSources") {
|
||||
description = "Generates Java source files from Protocol Buffer definitions"
|
||||
group = "build"
|
||||
|
||||
// Create a special configuration for the protobuf compiler rather than using runtime classpath
|
||||
val protoGenClasspath = configurations.create("protoGenClasspath")
|
||||
dependencies {
|
||||
protoGenClasspath("com.starrocks:jprotobuf-starrocks:${project.ext["jprotobuf-starrocks.version"]}:jar-with-dependencies")
|
||||
}
|
||||
|
||||
val protoDir = file("../../gensrc/proto")
|
||||
val outputDir = layout.buildDirectory.get().dir("generated-sources/proto").asFile
|
||||
|
||||
// List of proto files to process
|
||||
val protoFiles = listOf(
|
||||
"lake_types.proto",
|
||||
"internal_service.proto",
|
||||
"types.proto",
|
||||
"tablet_schema.proto",
|
||||
"lake_service.proto",
|
||||
"encryption.proto"
|
||||
)
|
||||
|
||||
// Declare inputs (proto files)
|
||||
inputs.files(protoFiles.map { file("$protoDir/$it") })
|
||||
|
||||
// Declare output directory
|
||||
outputs.dir(outputDir)
|
||||
|
||||
doFirst {
|
||||
mkdir(outputDir)
|
||||
|
||||
// Process each proto file individually
|
||||
protoFiles.forEach { protoFile ->
|
||||
logger.info("Processing proto file: $protoFile")
|
||||
project.javaexec {
|
||||
classpath = protoGenClasspath
|
||||
mainClass.set("com.baidu.bjf.remoting.protobuf.command.Main")
|
||||
args = listOf(
|
||||
"--java_out=$outputDir",
|
||||
"$protoDir/$protoFile"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Custom task for Thrift generation
|
||||
tasks.register<Task>("generateThriftSources") {
|
||||
description = "Generates Java source files from Thrift definitions"
|
||||
group = "build"
|
||||
|
||||
// Create a special configuration for the thrift compiler rather than using runtime classpath
|
||||
val thriftGenClasspath = configurations.create("thriftGenClasspath")
|
||||
dependencies {
|
||||
thriftGenClasspath("io.github.decster:thrift-java-maven-plugin:0.1.3")
|
||||
}
|
||||
|
||||
val protoDir = file("../../gensrc/thrift")
|
||||
val outputDir = layout.buildDirectory.get().dir("generated-sources/thrift").asFile
|
||||
|
||||
// List of proto files to process
|
||||
val protoFiles = fileTree(protoDir) {
|
||||
include("*.thrift")
|
||||
exclude("parquet.thrift")
|
||||
}.files
|
||||
|
||||
// Declare inputs (proto files)
|
||||
inputs.files(protoFiles)
|
||||
|
||||
// Declare output directory
|
||||
outputs.dir(outputDir)
|
||||
|
||||
doFirst {
|
||||
mkdir(outputDir)
|
||||
// Process each proto file individually
|
||||
project.javaexec {
|
||||
classpath = thriftGenClasspath
|
||||
mainClass.set("io.github.decster.ThriftCompiler")
|
||||
// Build arguments list with the output directory and all thrift files
|
||||
val allArgs = mutableListOf("-o", "$outputDir")
|
||||
protoFiles.forEach { file ->
|
||||
allArgs.add(file.absolutePath)
|
||||
}
|
||||
args = allArgs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
tasks.register<Task>("generateByScripts") {
|
||||
description = "Generates java code by scripts"
|
||||
group = "build"
|
||||
|
||||
val outputDir = layout.buildDirectory.get().dir("generated-sources/genscript").asFile
|
||||
|
||||
outputs.dir(outputDir)
|
||||
|
||||
doFirst {
|
||||
mkdir(outputDir)
|
||||
|
||||
// First Python script - build version generation
|
||||
project.exec {
|
||||
commandLine(
|
||||
"python3",
|
||||
"${project.rootProject.projectDir}/../build-support/gen_build_version.py",
|
||||
"--cpp", outputDir.toString(),
|
||||
"--java", outputDir.toString()
|
||||
)
|
||||
}
|
||||
|
||||
// Second Python script - function generation
|
||||
project.exec {
|
||||
commandLine(
|
||||
"python3",
|
||||
"${project.rootProject.projectDir}/../gensrc/script/gen_functions.py",
|
||||
"--cpp", outputDir.toString(),
|
||||
"--java", outputDir.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add source generation tasks to the build process
|
||||
tasks.compileJava {
|
||||
dependsOn("generateGrammarSource", "generateThriftSources", "generateProtoSources", "generateByScripts")
|
||||
}
|
||||
|
||||
tasks.named<PrecompileTask>("jprotobuf_precompile") {
|
||||
filterClassPackage = "com.starrocks.proto;com.starrocks.rpc;com.starrocks.server"
|
||||
generateProtoFile = "true"
|
||||
}
|
||||
|
||||
tasks.named<ProcessResources>("processTestResources") {
|
||||
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
|
||||
}
|
||||
|
||||
// Configure test task
|
||||
tasks.test {
|
||||
useJUnitPlatform()
|
||||
maxParallelForks = (project.findProperty("fe_ut_parallel") as String? ?: "8").toInt()
|
||||
|
||||
// Don't reuse JVM processes for tests
|
||||
forkEvery = 1
|
||||
|
||||
maxHeapSize = "4096m"
|
||||
|
||||
testLogging {
|
||||
// Events to log, like you have
|
||||
events = setOf(
|
||||
TestLogEvent.PASSED,
|
||||
TestLogEvent.SKIPPED,
|
||||
TestLogEvent.FAILED
|
||||
)
|
||||
|
||||
// Show the standard output and error streams of the test JVM(s)
|
||||
showStandardStreams = true
|
||||
|
||||
// Configure how exceptions are displayed
|
||||
exceptionFormat = TestExceptionFormat.FULL // Or SHORT
|
||||
showStackTraces = true
|
||||
showCauses = true // Show underlying causes for exceptions
|
||||
}
|
||||
|
||||
systemProperty("starrocks.home", project.ext["starrocks.home"] as String)
|
||||
|
||||
// Add JMockit Java agent to JVM arguments
|
||||
jvmArgs(
|
||||
"-Djdk.attach.allowAttachSelf",
|
||||
"-Duser.timezone=Asia/Shanghai",
|
||||
"-javaagent:${configurations.testCompileClasspath.get().find { it.name.contains("jmockit") }?.absolutePath}"
|
||||
)
|
||||
|
||||
// Use independent class loading (equivalent to useSystemClassLoader=false)
|
||||
systemProperty("java.security.manager", "allow")
|
||||
|
||||
// Exclude specific tests
|
||||
//exclude("**/QueryDumpRegressionTest.class")
|
||||
}
|
||||
|
||||
|
||||
// Configure JAR task
|
||||
tasks.jar {
|
||||
//dependsOn("jprotobuf_precompile")
|
||||
manifest {
|
||||
attributes(
|
||||
"Main-Class" to "com.starrocks.StarRocksFE",
|
||||
"Implementation-Version" to project.version
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1 @@
|
|||
../java/com/starrocks/sql/parser/StarRocks.g4
|
||||
|
|
@ -0,0 +1 @@
|
|||
../java/com/starrocks/sql/parser/StarRocksLex.g4
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
org.gradle.jvmargs=-Xmx4g -XX:MaxMetaspaceSize=512m
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
plugins {
|
||||
java
|
||||
id("com.github.johnrengelman.shadow") version "8.1.1"
|
||||
}
|
||||
|
||||
version = "1.0.0"
|
||||
|
||||
java {
|
||||
sourceCompatibility = JavaVersion.VERSION_1_8
|
||||
targetCompatibility = JavaVersion.VERSION_1_8
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation(project(":plugin-common"))
|
||||
compileOnly("io.trino.hive:hive-apache")
|
||||
compileOnly("org.apache.hadoop:hadoop-client")
|
||||
}
|
||||
|
||||
tasks.withType<JavaCompile> {
|
||||
options.release.set(8)
|
||||
}
|
||||
|
||||
|
||||
// Replace the jar task with shadowJar configuration
|
||||
tasks.named<com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar>("shadowJar") {
|
||||
// Minimize JAR
|
||||
minimize {
|
||||
exclude(dependency("${project.group}:plugin-common:${project.version}"))
|
||||
}
|
||||
|
||||
// Relocate packages
|
||||
relocate("org.roaringbitmap", "shade.starrocks.org.roaringbitmap")
|
||||
|
||||
// Filter artifacts and exclude signatures
|
||||
exclude("META-INF/*.SF", "META-INF/*.DSA", "META-INF/*.RSA")
|
||||
exclude("org/apache/logging/log4j/**")
|
||||
|
||||
mergeServiceFiles()
|
||||
archiveClassifier.set("")
|
||||
}
|
||||
|
||||
// Make the shadowJar task run when the build task runs
|
||||
tasks.build {
|
||||
dependsOn(tasks.shadowJar)
|
||||
}
|
||||
|
||||
// Equivalent to Maven Surefire plugin
|
||||
tasks.test {
|
||||
val failIfNoSpecifiedTests: String by project
|
||||
val failIfNoSpecifiedTestsValue = project.findProperty("failIfNoSpecifiedTests") ?: "false"
|
||||
systemProperty("failIfNoSpecifiedTests", failIfNoSpecifiedTestsValue)
|
||||
}
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
plugins {
|
||||
java
|
||||
}
|
||||
|
||||
java {
|
||||
sourceCompatibility = JavaVersion.VERSION_1_8
|
||||
targetCompatibility = JavaVersion.VERSION_1_8
|
||||
}
|
||||
|
||||
group = "com.starrocks"
|
||||
version = "1.0.0"
|
||||
|
||||
dependencies {
|
||||
implementation("com.google.guava:guava")
|
||||
implementation("org.roaringbitmap:RoaringBitmap")
|
||||
|
||||
testImplementation("org.junit.jupiter:junit-jupiter")
|
||||
testImplementation("com.github.hazendaz.jmockit:jmockit")
|
||||
}
|
||||
|
||||
tasks.withType<Test> {
|
||||
// Configure JMockit agent for tests
|
||||
jvmArgs("-javaagent:${repositories.mavenLocal().url.path}/com/github/hazendaz/jmockit/jmockit/1.49.4/jmockit-1.49.4.jar")
|
||||
|
||||
// Set for parallel test execution as in the Maven config
|
||||
maxParallelForks = providers.gradleProperty("fe_ut_parallel").map { it.toInt() }.getOrElse(1)
|
||||
|
||||
// Equivalent to reuseForks=false in Maven
|
||||
forkEvery = 1
|
||||
}
|
||||
|
||||
tasks.withType<JavaCompile> {
|
||||
options.encoding = "UTF-8"
|
||||
}
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
rootProject.name = "starrocks-fe"
|
||||
|
||||
include(
|
||||
"plugin-common",
|
||||
"fe-common",
|
||||
"spark-dpp",
|
||||
"fe-core",
|
||||
"hive-udf"
|
||||
)
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
// Copyright 2021-present StarRocks, Inc. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
plugins {
|
||||
java
|
||||
checkstyle
|
||||
}
|
||||
|
||||
java {
|
||||
sourceCompatibility = JavaVersion.VERSION_1_8
|
||||
targetCompatibility = JavaVersion.VERSION_1_8
|
||||
}
|
||||
|
||||
group = "com.starrocks"
|
||||
version = "1.0.0"
|
||||
|
||||
// Property equivalent to fe_ut_parallel in Maven
|
||||
val feUtParallel = project.findProperty("fe_ut_parallel") ?: "1"
|
||||
|
||||
dependencies {
|
||||
// StarRocks modules
|
||||
implementation(project(":fe-common"))
|
||||
implementation(project(":plugin-common"))
|
||||
|
||||
// Regular dependencies
|
||||
implementation("com.google.guava:guava")
|
||||
implementation("com.google.code.gson:gson")
|
||||
implementation("io.netty:netty-handler")
|
||||
implementation("org.roaringbitmap:RoaringBitmap")
|
||||
|
||||
// Provided scope dependencies - equivalent to compileOnly in Gradle
|
||||
compileOnly("commons-codec:commons-codec")
|
||||
compileOnly("org.apache.commons:commons-lang3")
|
||||
compileOnly("org.apache.spark:spark-core_2.12")
|
||||
compileOnly("org.apache.spark:spark-sql_2.12")
|
||||
compileOnly("org.apache.spark:spark-catalyst_2.12")
|
||||
compileOnly("org.apache.hadoop:hadoop-common") {
|
||||
exclude(group = "io.netty")
|
||||
}
|
||||
compileOnly("org.apache.parquet:parquet-column")
|
||||
compileOnly("org.apache.parquet:parquet-hadoop")
|
||||
compileOnly("org.apache.parquet:parquet-common")
|
||||
compileOnly("commons-collections:commons-collections")
|
||||
compileOnly("org.scala-lang:scala-library")
|
||||
compileOnly("com.esotericsoftware:kryo-shaded")
|
||||
compileOnly("org.apache.logging.log4j:log4j-slf4j-impl")
|
||||
|
||||
// Test dependencies
|
||||
testImplementation("org.junit.jupiter:junit-jupiter")
|
||||
testImplementation("com.github.hazendaz.jmockit:jmockit")
|
||||
testImplementation("org.apache.spark:spark-sql_2.12")
|
||||
}
|
||||
|
||||
tasks.withType<Test> {
|
||||
// Configure JMockit agent for tests
|
||||
jvmArgs("-javaagent:${repositories.mavenLocal().url.path}/com/github/hazendaz/jmockit/jmockit/1.49.4/jmockit-1.49.4.jar")
|
||||
|
||||
// Set for parallel test execution as in the Maven config
|
||||
maxParallelForks = (feUtParallel as String).toInt()
|
||||
|
||||
// Equivalent to reuseForks=false in Maven
|
||||
forkEvery = 1
|
||||
}
|
||||
|
||||
tasks.withType<JavaCompile> {
|
||||
options.encoding = "UTF-8"
|
||||
}
|
||||
|
||||
checkstyle {
|
||||
toolVersion = project.findProperty("puppycrawl.version") as String? ?: "10.21.1"
|
||||
configFile = rootProject.file("checkstyle.xml")
|
||||
}
|
||||
|
||||
tasks.withType<Checkstyle> {
|
||||
exclude("**/jmockit/**/*")
|
||||
isShowViolations = true
|
||||
ignoreFailures = false
|
||||
}
|
||||
|
||||
// Equivalent to Maven Assembly plugin to create a jar with dependencies
|
||||
tasks.register<Jar>("jarWithDependencies") {
|
||||
archiveClassifier.set("with-dependencies")
|
||||
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
|
||||
|
||||
manifest {
|
||||
attributes["Main-Class"] = "com.starrocks.load.loadv2.etl.SparkEtlJob"
|
||||
}
|
||||
|
||||
from(sourceSets.main.get().output)
|
||||
|
||||
dependsOn(configurations.runtimeClasspath)
|
||||
from({
|
||||
configurations.runtimeClasspath.get()
|
||||
.filter { it.name.endsWith("jar") }
|
||||
.map { zipTree(it) }
|
||||
})
|
||||
}
|
||||
|
||||
// Make the jarWithDependencies task run as part of the build
|
||||
tasks.build {
|
||||
dependsOn("jarWithDependencies")
|
||||
}
|
||||
|
||||
// Set the final JAR name
|
||||
tasks.jar {
|
||||
archiveBaseName.set("spark-dpp")
|
||||
archiveVersion.set(project.version.toString())
|
||||
}
|
||||
Loading…
Reference in New Issue