diff --git a/docker/docker-compose/README.md b/docker/docker-compose/README.md
index 828a05324ee..de0576c058f 100644
--- a/docker/docker-compose/README.md
+++ b/docker/docker-compose/README.md
@@ -1,53 +1 @@
-
-
-# Docker Compose for StarRocks
-
-This directory contains the Docker Compose YAML files for the StarRocks deployment.
-
-You can deploy a StarRocks cluster with one BE node using [**docker-compose.yml**](./docker-compose.yml), or simulate a distributed StarRocks cluster with multiple BEs on a single instance using [docker-compose-3BE.yml](./docker-compose-3BE.yml).
-
-Note that deploying with docker compose is only recommended in a testing environment, as high availability cannot be guaranteed with a single instance deployment.
-## Deploy StarRocks using Docker Compose
-
-Run the following command to deploy StarRocks using Docker Compose:
-
-```shell
-docker-compose up -d
-```
-
-The commented-out sections in the above example YAML file define the mount paths and volumes that FE and BE use to persist data.
-Note that root privilege is required to deploy StarRocks with Docker with persistent volume.
-
-## Check cluster status
-
-After StarRocks is deployed, check the cluster status:
-
-1. Connect to the cluster with the IP address of the FE instance. You can get the IP address of an instance using `docker inspect`.
-
- ```shell
- mysql -h -P9030 -uroot
- ```
-
-2. Check the status of the BE node.
-
- ```shell
- show backends;
- ```
-
- If the field Alive is true, this BE node is properly started and added to the cluster.
-
-## Troubleshooting
-
-When you connect to the cluster, StarRocks may return the following error:
-
-```shell
- ERROR 2003 (HY000): Can't connect to MySQL server on 'starrocks-fe:9030' (111)
-```
-
-The reason may be that the BE node was started before the FE node is ready. To solve this problem, re-run the docker compose up command, or manually add the BE node to the cluster using the following command:
-
-```sql
-ADD BACKEND ":9050";
-```
-
-Replace `` with the actual IP address of the BE node.
+Please see https://github.com/StarRocks/demo/tree/master/deploy/docker-compose for more information.
diff --git a/docker/docker-compose/docker-compose-3BE.yml b/docker/docker-compose/docker-compose-3BE.yml
deleted file mode 100644
index 456fc56c21c..00000000000
--- a/docker/docker-compose/docker-compose-3BE.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-version: "3.9"
-services:
- starrocks-fe:
- image: starrocks/fe-ubuntu:latest
- hostname: starrocks-fe
- container_name: starrocks-fe
- #user: root
- command: /opt/starrocks/fe/bin/start_fe.sh
- ports:
- - 1030:8030
- - 2020:9020
- - 3030:9030
-# volumes:
-# - path/of/host/fe.conf:/opt/starrocks/fe/conf/fe.conf
-# - path/of/host/fe/meta:/opt/starrocks/fe/meta
-
- starrocks-be1:
- image: starrocks/be-ubuntu:latest
- #user: root
- command:
- - /bin/bash
- - -c
- - |
- sleep 15s; mysql --connect-timeout 2 -h starrocks-fe -P9030 -uroot -e "alter system add backend \"starrocks-be1:9050\";"
- /opt/starrocks/be/bin/start_be.sh
-
- hostname: starrocks-be1
- container_name: starrocks-be1
- depends_on:
- - "starrocks-fe"
-# volumes:
-# - path/of/host/be.conf:/opt/starrocks/be/conf/be.conf
-# - path/of/host/starrocks-be1/storage:/opt/starrocks/be/storage
- starrocks-be2:
- image: starrocks/be-ubuntu:latest
- #user: root
- command:
- - /bin/bash
- - -c
- - |
- sleep 15s; mysql --connect-timeout 2 -h starrocks-fe -P9030 -uroot -e "alter system add backend \"starrocks-be2:9050\";"
- /opt/starrocks/be/bin/start_be.sh
-
- hostname: starrocks-be2
- container_name: starrocks-be2
- depends_on:
- - "starrocks-fe"
-# volumes:
-# - path/of/host/be.conf:/opt/starrocks/be/conf/be.conf
-# - path/of/host/starrocks-be2/storage:/opt/starrocks/be/storage
- starrocks-be3:
- image: starrocks/be-ubuntu:latest
- #user: root
- command:
- - /bin/bash
- - -c
- - |
- sleep 15s; mysql --connect-timeout 2 -h starrocks-fe -P9030 -uroot -e "alter system add backend \"starrocks-be3:9050\";"
- /opt/starrocks/be/bin/start_be.sh
-
- hostname: starrocks-be3
- container_name: starrocks-be3
- depends_on:
- - "starrocks-fe"
-# volumes:
-# - path/of/host/be.conf:/opt/starrocks/be/conf/be.conf
-# - path/of/host/starrocks-be3/storage:/opt/starrocks/be/storage
diff --git a/docker/docker-compose/docker-compose.yml b/docker/docker-compose/docker-compose.yml
deleted file mode 100644
index eade91f8963..00000000000
--- a/docker/docker-compose/docker-compose.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-version: "3.9"
-services:
- starrocks-fe:
- image: starrocks/fe-ubuntu:latest
- hostname: starrocks-fe
- container_name: starrocks-fe
- command:
- /opt/starrocks/fe/bin/start_fe.sh
- ports:
- - 8030:8030
- - 9020:9020
- - 9030:9030
- # volumes:
- # - ../../conf/fe.conf:/opt/starrocks/fe/conf/fe.conf
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:9030"]
- interval: 5s
- timeout: 5s
- retries: 30
-
- starrocks-be:
- image: starrocks/be-ubuntu:latest
- command:
- - /bin/bash
- - -c
- - |
- sleep 15s; mysql --connect-timeout 2 -h starrocks-fe -P9030 -uroot -e "alter system add backend \"starrocks-be:9050\";"
- /opt/starrocks/be/bin/start_be.sh
- ports:
- - 8040:8040
- hostname: starrocks-be
- container_name: starrocks-be
- depends_on:
- - starrocks-fe
- # volumes:
- # - ../../conf/be.conf:/opt/starrocks/be/conf/be.conf
diff --git a/docker/starrocks-stack-compose/README.md b/docker/starrocks-stack-compose/README.md
deleted file mode 100644
index d3ff6b7e994..00000000000
--- a/docker/starrocks-stack-compose/README.md
+++ /dev/null
@@ -1,82 +0,0 @@
-
-
-# StarRocks Stacks Docker Compose
-
-Docker Compose for StarRocks AllIn1 container and a few other peripheral applications for StarRocks development and test on local laptop
-
-## This compose file composes the following services:
-- starrocks allin1 local cluster service.
-- minio local service (emulation for S3, used by testing broker load).
-- azurite service (emulation for Azure blob storage, used by testing broker load).
-- zeppelin local service with: (used as sql notebook and data visualization)
- - mysql jdbc interpreter pre-configured and connected to starrocks allin1 service.
- - sample starrocks notebook.
-
-
-
-**Note**: Please refer to [docker-compose.yml](docker-compose.yml) to customize the container port mapping, volume mount, or other docker configs.
-
-## Start the StarRocks stack compose environment
-
-### Start all services in the compose file
-```shell
-COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d
-```
-
-### Start a specific service(s) in the compose file
-
-```shell
-COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d starrocks minio
-```
-
-### Stop the Celostar compose environment
-```shell
-docker compose down
-```
-## Minio Local Service
-MinIO Object Storage Server Running locally.
-This service can be accessed from:
-- **local laptop**
- - Storage API: http://127.0.0.1:9000
- - Console: http://127.0.0.1:9001
-- **docker network**
- - Storage API: http://minio.local.com:9000
- - Console: http://minio.local.com:9001
-
-
-Documentation: https://min.io/docs/minio/linux/index.html
-
-
-
-
-## Azurite Service
-Azure blob store emulator service running locally.
-This service can be accessed from:
-- **local laptop**
- - Azurite Blob service: http://127.0.0.1:10000
- - Azurite Queue service: http://127.0.0.1:10001
- - Azurite Table service: http://127.0.0.1:10002
-- **docker network**
- - Azurite Blob service: http://azurite.local.com:10000
- - Azurite Queue service: http://azurite.local.com:10001
- - Azurite Table service: http://azurite.local.com:10002
-
-### Use Intellij Bigdata Tool plugin to access azurite
-The Bigdata Tool supports drags and drops files betwen Azure blob store and local file system or other Blob Store such as S3, Minio.
-
-
-
-### [Azurite Command Line Tool ](https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&bc=%2Fazure%2Fstorage%2Fblobs%2Fbreadcrumb%2Ftoc.json&tabs=visual-studio#command-line-options)
-Documentation: [Use the Azurite emulator for local Azure Storage development](https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azurite?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&bc=%2Fazure%2Fstorage%2Fblobs%2Fbreadcrumb%2Ftoc.json&tabs=visual-studio )
-
-
-
-## Zeppelin Local Service
-Zeppelin Notebook Service running locally.
-This service can be accessed from:
-- **local laptop**: http://127.0.0.1:8089
-- **docker network**: http://zeppelin.local.com:8089
-
-The Zeppelin Notebook can also be accessed via Big Data Tools Intellij plugin
-
-
diff --git a/docker/starrocks-stack-compose/azurite1.png b/docker/starrocks-stack-compose/azurite1.png
deleted file mode 100644
index 3689a785c4b..00000000000
Binary files a/docker/starrocks-stack-compose/azurite1.png and /dev/null differ
diff --git a/docker/starrocks-stack-compose/azurite2.png b/docker/starrocks-stack-compose/azurite2.png
deleted file mode 100644
index 40de3772359..00000000000
Binary files a/docker/starrocks-stack-compose/azurite2.png and /dev/null differ
diff --git a/docker/starrocks-stack-compose/docker-compose.yml b/docker/starrocks-stack-compose/docker-compose.yml
deleted file mode 100644
index e0a261cfbc3..00000000000
--- a/docker/starrocks-stack-compose/docker-compose.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-# This docker compose file builds a StarRocks local development and test environment.
-
-version: "3.9"
-
-networks:
- starrocks-stack-network:
- driver: bridge
-
-services:
- starrocks:
- container_name: starrocks
- hostname: starrocks.local.com
- platform: "linux/amd64"
- # TODO: Replace image with any image your want to test
- image: starrocks/allin1-ubuntu:3.0.0-rc01
- restart: unless-stopped
- ports: # port mapping format "[host port]:[container port]", can be configured to your preferred port
- - "8030:8030"
- - "8040:8040"
- - "9030:9030"
- healthcheck:
- test: 'mysql -uroot -h127.0.0.1 -P 9030 -e "show backends\G" |grep "Alive: true"'
- interval: 10s
- timeout: 5s
- retries: 3
- networks: # network config, can be configured to your preferred port and ip address, if not specified, it will use default network and assign a dynamic ip
- starrocks-stack-network:
- # Configure persistent volume to keep the StarRocks database state across container recreation.
- # This way, even you have to recreate the container, e.g. updating the image from one version to another, the
- # StarRocks database will be preserved.
- volumes: # volume mapping format "[host volume directory]:[container volume directory]", can be configured to your preferred mount point
- - ${HOME}/dv/starrocks/be/storage:/data/deploy/starrocks/be/storage
- - ${HOME}/dv/starrocks/be/log:/data/deploy/starrocks/be/log
- - ${HOME}/dv/starrocks/fe/meta:/data/deploy/starrocks/fe/meta
- - ${HOME}/dv/starrocks/fe/log:/data/deploy/starrocks/fe/log
-
- minio:
- container_name: minio
- platform: "linux/amd64"
- hostname: minio.local.com
- image: quay.io/minio/minio
- restart: unless-stopped
- ports: # port mapping format "[host port]:[container port]", can be configured to your preferred port
- - "9000:9000"
- - "9001:9001"
- healthcheck:
- test: ["CMD", "curl", "-f", "http://minio.local.com:9000/minio/health/live"]
- interval: 10s
- timeout: 5s
- retries: 3
- volumes:
- - ${HOME}/dv/minio/data:/data
- networks: # network config, can be configured to your preferred port and ip address, if not specified, it will use default network and assign a dynamic ip
- starrocks-stack-network:
- environment:
- MINIO_ROOT_USER: root
- MINIO_ROOT_PASSWORD: rootroot
- command: server /data --console-address ":9001"
-
-
-# Azure emulator service: https://github.com/Azure/Azurite
- azurite:
- container_name: azurite
- platform: "linux/amd64"
- hostname: azurite.local.com
- image: mcr.microsoft.com/azure-storage/azurite
- restart: unless-stopped
- environment:
- AZURITE_ACCOUNTS: "root:rootroot" # TODO: customize your own account credential
- ports:
- - "10000:10000" # blob service
- - "10001:10001" # queue service
- - "10002:10002" # table service
- volumes: # volume mapping format "[host volume directory]:[container volume directory]", can be configured to your preferred mount point
- - ${HOME}/dv/azurite/data:/data
- networks: # network config, can be configured to your preferred port and ip address, if not specified, it will use default network and assign a dynamic ip
- starrocks-stack-network:
-
- zeppelin:
- container_name: zeppelin
- platform: "linux/amd64"
- hostname: zeppelin.local.com
- build:
- context: zeppelin4starrocks
- dockerfile: Dockerfile
- restart: unless-stopped
- ports: # port mapping format "[host port]:[container port]", can be configured to your preferred port
- - "8089:8080"
- healthcheck:
- test: ["CMD", "curl", "-f", "http://zeppelin.local.com:8080"]
- interval: 10s
- timeout: 5s
- retries: 3
- networks: # network config, can be configured to your preferred port and ip address, if not specified, it will use default network and assign a dynamic ip
- starrocks-stack-network:
diff --git a/docker/starrocks-stack-compose/minio-consle.png b/docker/starrocks-stack-compose/minio-consle.png
deleted file mode 100644
index cd600d62e69..00000000000
Binary files a/docker/starrocks-stack-compose/minio-consle.png and /dev/null differ
diff --git a/docker/starrocks-stack-compose/starrocks-stack.png b/docker/starrocks-stack-compose/starrocks-stack.png
deleted file mode 100644
index e5626612cb6..00000000000
Binary files a/docker/starrocks-stack-compose/starrocks-stack.png and /dev/null differ
diff --git a/docker/starrocks-stack-compose/zeppelin.png b/docker/starrocks-stack-compose/zeppelin.png
deleted file mode 100644
index a2e76f1e0b0..00000000000
Binary files a/docker/starrocks-stack-compose/zeppelin.png and /dev/null differ
diff --git a/docker/starrocks-stack-compose/zeppelin4starrocks/Dockerfile b/docker/starrocks-stack-compose/zeppelin4starrocks/Dockerfile
deleted file mode 100644
index 0cec575790b..00000000000
--- a/docker/starrocks-stack-compose/zeppelin4starrocks/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-FROM apache/zeppelin:0.10.1
-
-# update interpreter config(default to jdbc:mysql) before starting the service
-ADD interpreter.json /opt/zeppelin/conf/
-
-CMD ["bin/zeppelin.sh"]
diff --git a/docker/starrocks-stack-compose/zeppelin4starrocks/README.md b/docker/starrocks-stack-compose/zeppelin4starrocks/README.md
deleted file mode 100644
index 702f7ddfccb..00000000000
--- a/docker/starrocks-stack-compose/zeppelin4starrocks/README.md
+++ /dev/null
@@ -1 +0,0 @@
-A Docker file that customize the zeppelin docker with pre-configured mysql-connector-java:5.1.38 and default interpreter to JDBC.
diff --git a/docker/starrocks-stack-compose/zeppelin4starrocks/interpreter.json b/docker/starrocks-stack-compose/zeppelin4starrocks/interpreter.json
deleted file mode 100644
index ffff669c021..00000000000
--- a/docker/starrocks-stack-compose/zeppelin4starrocks/interpreter.json
+++ /dev/null
@@ -1,3149 +0,0 @@
-{
- "interpreterSettings": {
- "beam": {
- "id": "beam",
- "name": "beam",
- "group": "beam",
- "properties": {
- "zeppelin.scio.argz": {
- "name": "zeppelin.scio.argz",
- "value": "--runner\u003dInProcessPipelineRunner",
- "type": "textarea",
- "description": "Scio interpreter wide arguments"
- },
- "zeppelin.scio.maxResult": {
- "name": "zeppelin.scio.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of SCollection results to display."
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "beam",
- "class": "org.apache.zeppelin.beam.BeamInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false
- }
- },
- {
- "name": "scio",
- "class": "org.apache.zeppelin.scio.ScioInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "scala"
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "ignite": {
- "id": "ignite",
- "name": "ignite",
- "group": "ignite",
- "properties": {
- "ignite.addresses": {
- "name": "ignite.addresses",
- "value": "127.0.0.1:47500..47509",
- "type": "textarea",
- "description": "Comma separated list of addresses (e.g. 127.0.0.1:47500 or 127.0.0.1:47500..47509)"
- },
- "ignite.clientMode": {
- "name": "ignite.clientMode",
- "value": true,
- "type": "checkbox",
- "description": "Client mode. true or false"
- },
- "ignite.config.url": {
- "name": "ignite.config.url",
- "value": "",
- "type": "url",
- "description": "Configuration URL. Overrides all other settings."
- },
- "ignite.peerClassLoadingEnabled": {
- "name": "ignite.peerClassLoadingEnabled",
- "value": true,
- "type": "checkbox",
- "description": "Peer class loading enabled. True or false"
- },
- "ignite.jdbc.url": {
- "name": "ignite.jdbc.url",
- "value": "jdbc:ignite:cfg://default-ignite-jdbc.xml",
- "type": "string",
- "description": "Ignite JDBC connection URL."
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "ignite",
- "class": "org.apache.zeppelin.ignite.IgniteInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": false
- }
- },
- {
- "name": "ignitesql",
- "class": "org.apache.zeppelin.ignite.IgniteSqlInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "geode": {
- "id": "geode",
- "name": "geode",
- "group": "geode",
- "properties": {
- "geode.locator.host": {
- "name": "geode.locator.host",
- "value": "localhost",
- "type": "string",
- "description": "The Geode Locator Host."
- },
- "geode.locator.port": {
- "name": "geode.locator.port",
- "value": "10334",
- "type": "number",
- "description": "The Geode Locator Port."
- },
- "geode.max.result": {
- "name": "geode.max.result",
- "value": "1000",
- "type": "number",
- "description": "Max number of OQL result to display."
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "oql",
- "class": "org.apache.zeppelin.geode.GeodeOqlInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql"
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "jdbc": {
- "id": "jdbc",
- "name": "jdbc",
- "group": "jdbc",
- "properties": {
- "default.url": {
- "name": "default.url",
- "value": "jdbc:mysql://starrocks.local.com:9030",
- "type": "string",
- "description": "The URL for JDBC."
- },
- "default.user": {
- "name": "default.user",
- "value": "root",
- "type": "string",
- "description": "The JDBC user name"
- },
- "default.password": {
- "name": "default.password",
- "value": "",
- "type": "password",
- "description": "The JDBC user password"
- },
- "default.driver": {
- "name": "default.driver",
- "value": "com.mysql.jdbc.Driver",
- "type": "string",
- "description": "JDBC Driver Name"
- },
- "default.completer.ttlInSeconds": {
- "name": "default.completer.ttlInSeconds",
- "value": "120",
- "type": "number",
- "description": "Time to live sql completer in seconds (-1 to update everytime, 0 to disable update)"
- },
- "default.completer.schemaFilters": {
- "name": "default.completer.schemaFilters",
- "value": "",
- "type": "textarea",
- "description": "Сomma separated schema (schema \u003d catalog \u003d database) filters to get metadata for completions. Supports \u0027%\u0027 symbol is equivalent to any set of characters. (ex. prod_v_%,public%,info)"
- },
- "default.precode": {
- "name": "default.precode",
- "value": "",
- "type": "textarea",
- "description": "SQL which executes while opening connection"
- },
- "default.statementPrecode": {
- "name": "default.statementPrecode",
- "value": "",
- "type": "textarea",
- "description": "Runs before each run of the paragraph, in the same connection"
- },
- "common.max_count": {
- "name": "common.max_count",
- "value": "1000",
- "type": "number",
- "description": "Max number of SQL result to display."
- },
- "zeppelin.jdbc.auth.type": {
- "name": "zeppelin.jdbc.auth.type",
- "value": "",
- "type": "string",
- "description": "If auth type is needed, Example: KERBEROS"
- },
- "zeppelin.jdbc.auth.kerberos.proxy.enable": {
- "name": "zeppelin.jdbc.auth.kerberos.proxy.enable",
- "value": "true",
- "type": "checkbox",
- "description": "When auth type is Kerberos, enable/disable Kerberos proxy with the login user to get the connection. Default value is true."
- },
- "zeppelin.jdbc.concurrent.use": {
- "name": "zeppelin.jdbc.concurrent.use",
- "value": true,
- "type": "checkbox",
- "description": "Use parallel scheduler"
- },
- "zeppelin.jdbc.concurrent.max_connection": {
- "name": "zeppelin.jdbc.concurrent.max_connection",
- "value": "10",
- "type": "number",
- "description": "Number of concurrent execution"
- },
- "zeppelin.jdbc.keytab.location": {
- "name": "zeppelin.jdbc.keytab.location",
- "value": "",
- "type": "string",
- "description": "Kerberos keytab location"
- },
- "zeppelin.jdbc.principal": {
- "name": "zeppelin.jdbc.principal",
- "value": "",
- "type": "string",
- "description": "Kerberos principal"
- },
- "zeppelin.jdbc.interpolation": {
- "name": "zeppelin.jdbc.interpolation",
- "value": false,
- "type": "checkbox",
- "description": "Enable ZeppelinContext variable interpolation into paragraph text"
- },
- "zeppelin.jdbc.maxConnLifetime": {
- "name": "zeppelin.jdbc.maxConnLifetime",
- "value": "-1",
- "type": "number",
- "description": "Maximum of connection lifetime in milliseconds. A value of zero or less means the connection has an infinite lifetime."
- },
- "zeppelin.jdbc.maxRows": {
- "name": "zeppelin.jdbc.maxRows",
- "value": "1000",
- "type": "number",
- "description": "Maximum number of rows fetched from the query."
- },
- "zeppelin.jdbc.hive.timeout.threshold": {
- "name": "zeppelin.jdbc.hive.timeout.threshold",
- "value": "60000",
- "type": "number",
- "description": "Timeout for hive job timeout"
- },
- "zeppelin.jdbc.hive.monitor.query_interval": {
- "name": "zeppelin.jdbc.hive.monitor.query_interval",
- "value": "1000",
- "type": "number",
- "description": "Query interval for hive statement"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "sql",
- "class": "org.apache.zeppelin.jdbc.JDBCInterpreter",
- "defaultInterpreter": true,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionSupport": true
- }
- }
- ],
- "dependencies": [
- {
- "groupArtifactVersion": "mysql:mysql-connector-java:5.1.38",
- "local": false
- }
- ],
- "option": {
- "remote": true,
- "port": -1,
- "perNote": "shared",
- "perUser": "shared",
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "lens": {
- "id": "lens",
- "name": "lens",
- "group": "lens",
- "properties": {
- "zeppelin.lens.run.concurrent": {
- "name": "zeppelin.lens.run.concurrent",
- "value": true,
- "type": "checkbox",
- "description": "Run concurrent Lens Sessions"
- },
- "zeppelin.lens.maxThreads": {
- "name": "zeppelin.lens.maxThreads",
- "value": "10",
- "type": "number",
- "description": "If concurrency is true then how many threads?"
- },
- "zeppelin.lens.maxResults": {
- "name": "zeppelin.lens.maxResults",
- "value": "1000",
- "type": "number",
- "description": "max number of rows to display"
- },
- "lens.server.base.url": {
- "name": "lens.server.base.url",
- "value": "http://\u003chostname\u003e:\u003cport\u003e/lensapi",
- "type": "url",
- "description": "The URL for Lens Server"
- },
- "lens.client.dbname": {
- "name": "lens.client.dbname",
- "value": "default",
- "type": "string",
- "description": "The database schema name"
- },
- "lens.query.enable.persistent.resultset": {
- "name": "lens.query.enable.persistent.resultset",
- "value": false,
- "type": "checkbox",
- "description": "Apache Lens to persist result in HDFS?"
- },
- "lens.session.cluster.user": {
- "name": "lens.session.cluster.user",
- "value": "default",
- "type": "string",
- "description": "Hadoop cluster username"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "lens",
- "class": "org.apache.zeppelin.lens.LensInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "influxdb": {
- "id": "influxdb",
- "name": "influxdb",
- "group": "influxdb",
- "properties": {
- "influxdb.url": {
- "name": "influxdb.url",
- "value": "http://localhost:9999",
- "type": "string",
- "description": "The URL for InfluxDB 2.X API"
- },
- "influxdb.token": {
- "name": "influxdb.token",
- "value": "my-token",
- "type": "password",
- "description": "InfluxDB auth token"
- },
- "influxdb.org": {
- "name": "influxdb.org",
- "value": "my-org",
- "type": "string",
- "description": "InfluxDB org name"
- },
- "influxdb.logLevel": {
- "name": "influxdb.logLevel",
- "value": "NONE",
- "type": "string",
- "description": "InfluxDB http client library verbosity level (NONE, BASIC, HEADERS, BODY)"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "influxdb",
- "class": "org.apache.zeppelin.influxdb.InfluxDBInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "pig": {
- "id": "pig",
- "name": "pig",
- "group": "pig",
- "properties": {
- "zeppelin.pig.execType": {
- "name": "zeppelin.pig.execType",
- "value": "mapreduce",
- "type": "string",
- "description": "local | mapreduce | tez_local | tez | spark_local | spark"
- },
- "zeppelin.pig.includeJobStats": {
- "name": "zeppelin.pig.includeJobStats",
- "value": false,
- "type": "checkbox",
- "description": "flag to include job stats in output"
- },
- "SPARK_MASTER": {
- "name": "SPARK_MASTER",
- "value": "local",
- "type": "string",
- "description": "local | yarn-client"
- },
- "SPARK_JAR": {
- "name": "SPARK_JAR",
- "value": "",
- "type": "textarea",
- "description": "spark assembly jar uploaded in hdfs"
- },
- "zeppelin.pig.maxResult": {
- "name": "zeppelin.pig.maxResult",
- "value": "1000",
- "type": "number",
- "description": "max row number for %pig.query"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "script",
- "class": "org.apache.zeppelin.pig.PigInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "pig",
- "editOnDblClick": false
- }
- },
- {
- "name": "query",
- "class": "org.apache.zeppelin.pig.PigQueryInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "pig",
- "editOnDblClick": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "file": {
- "id": "file",
- "name": "file",
- "group": "file",
- "properties": {
- "hdfs.url": {
- "name": "hdfs.url",
- "value": "http://localhost:50070/webhdfs/v1/",
- "type": "url",
- "description": "The URL for WebHDFS"
- },
- "hdfs.user": {
- "name": "hdfs.user",
- "value": "hdfs",
- "type": "string",
- "description": "The WebHDFS user"
- },
- "hdfs.maxlength": {
- "name": "hdfs.maxlength",
- "value": "1000",
- "type": "number",
- "description": "Maximum number of lines of results fetched"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "hdfs",
- "class": "org.apache.zeppelin.file.HDFSFileInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "spark-submit": {
- "id": "spark-submit",
- "name": "spark-submit",
- "group": "spark-submit",
- "properties": {
- "SPARK_HOME": {
- "name": "SPARK_HOME",
- "value": "",
- "type": "string",
- "description": "Location of spark distribution"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "submit",
- "class": "org.apache.zeppelin.spark.submit.SparkSubmitInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "java": {
- "id": "java",
- "name": "java",
- "group": "java",
- "properties": {},
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "java",
- "class": "org.apache.zeppelin.java.JavaInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "java",
- "editOnDblClick": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "jupyter": {
- "id": "jupyter",
- "name": "jupyter",
- "group": "jupyter",
- "properties": {},
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "jupyter",
- "class": "org.apache.zeppelin.jupyter.JupyterInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "text",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "elasticsearch": {
- "id": "elasticsearch",
- "name": "elasticsearch",
- "group": "elasticsearch",
- "properties": {
- "elasticsearch.host": {
- "name": "elasticsearch.host",
- "value": "localhost",
- "type": "string",
- "description": "The host for Elasticsearch"
- },
- "elasticsearch.port": {
- "name": "elasticsearch.port",
- "value": "9300",
- "type": "number",
- "description": "The port for Elasticsearch"
- },
- "elasticsearch.client.type": {
- "name": "elasticsearch.client.type",
- "value": "transport",
- "type": "string",
- "description": "The type of client for Elasticsearch (transport or http)"
- },
- "elasticsearch.cluster.name": {
- "name": "elasticsearch.cluster.name",
- "value": "elasticsearch",
- "type": "string",
- "description": "The cluster name for Elasticsearch"
- },
- "elasticsearch.result.size": {
- "name": "elasticsearch.result.size",
- "value": "10",
- "type": "number",
- "description": "The size of the result set of a search query"
- },
- "elasticsearch.basicauth.username": {
- "name": "elasticsearch.basicauth.username",
- "value": "",
- "type": "string",
- "description": "Username for a basic authentication"
- },
- "elasticsearch.basicauth.password": {
- "name": "elasticsearch.basicauth.password",
- "value": "",
- "type": "password",
- "description": "Password for a basic authentication"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "elasticsearch",
- "class": "org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "submarine": {
- "id": "submarine",
- "name": "submarine",
- "group": "submarine",
- "properties": {
- "zeppelin.submarine.auth.type": {
- "name": "zeppelin.submarine.auth.type",
- "value": "kerberos",
- "type": "string",
- "description": "simple or kerberos"
- },
- "yarn.webapp.http.address": {
- "name": "yarn.webapp.http.address",
- "value": "",
- "type": "string",
- "description": "YARN web ui address"
- },
- "INTERPRETER_LAUNCH_MODE": {
- "name": "INTERPRETER_LAUNCH_MODE",
- "value": "local",
- "type": "string",
- "description": "Submarine interpreter launch in local/yarn"
- },
- "HADOOP_YARN_SUBMARINE_JAR": {
- "name": "HADOOP_YARN_SUBMARINE_JAR",
- "value": "",
- "type": "string",
- "description": "Submarine executive full path, ex) ../hadoop/share/hadoop/yarn/hadoop-yarn-submarine-x.x.x.jar"
- },
- "DOCKER_HADOOP_HDFS_HOME": {
- "name": "DOCKER_HADOOP_HDFS_HOME",
- "value": "",
- "type": "string",
- "description": "hadoop home in docker container"
- },
- "DOCKER_JAVA_HOME": {
- "name": "DOCKER_JAVA_HOME",
- "value": "",
- "type": "string",
- "description": "java home in docker container"
- },
- "SUBMARINE_INTERPRETER_DOCKER_IMAGE": {
- "name": "SUBMARINE_INTERPRETER_DOCKER_IMAGE",
- "value": "",
- "type": "string",
- "description": "Docker image of submarine interpreter"
- },
- "zeppelin.interpreter.rpc.portRange": {
- "name": "zeppelin.interpreter.rpc.portRange",
- "value": "29914",
- "type": "string",
- "description": "The process port of the submarine interpreter container mapping defined in the `scripts/docker/interpreter/submarine/tensorflow_gpu/Dockerfile` file"
- },
- "submarine.yarn.queue": {
- "name": "submarine.yarn.queue",
- "value": "root.default",
- "type": "string",
- "description": "submarine queue name of yarn"
- },
- "submarine.hadoop.home": {
- "name": "submarine.hadoop.home",
- "value": "",
- "type": "string",
- "description": "submarine user-defined HADOOP_HOME"
- },
- "SUBMARINE_HADOOP_CONF_DIR": {
- "name": "SUBMARINE_HADOOP_CONF_DIR",
- "value": "",
- "type": "string",
- "description": "submarine user-defined HADOOP_CONF_DIR"
- },
- "submarine.hadoop.krb5.conf": {
- "name": "submarine.hadoop.krb5.conf",
- "value": "/etc/krb5.conf",
- "type": "string",
- "description": "submarine user-defined hdfs/yarn kerberos authentication krb5.conf"
- },
- "SUBMARINE_HADOOP_KEYTAB": {
- "name": "SUBMARINE_HADOOP_KEYTAB",
- "value": "",
- "type": "string",
- "description": "submarine hdfs/yarn kerberos authentication"
- },
- "SUBMARINE_HADOOP_PRINCIPAL": {
- "name": "SUBMARINE_HADOOP_PRINCIPAL",
- "value": "",
- "type": "string",
- "description": "submarine hdfs/yarn kerberos authentication"
- },
- "docker.container.network": {
- "name": "docker.container.network",
- "value": "",
- "type": "string",
- "description": "Network name in the docker container"
- },
- "DOCKER_CONTAINER_TIME_ZONE": {
- "name": "DOCKER_CONTAINER_TIME_ZONE",
- "value": "Etc/UTC",
- "type": "string",
- "description": "docker container time zone"
- },
- "zeppelin.interpreter.connect.timeout": {
- "name": "zeppelin.interpreter.connect.timeout",
- "value": "100000",
- "type": "number",
- "description": "zeppelin interpreter connect timeout"
- },
- "submarine.algorithm.hdfs.path": {
- "name": "submarine.algorithm.hdfs.path",
- "value": "hdfs://...",
- "type": "string",
- "description": "Algorithm file upload HDFS path, Support ${username} variable symbol, For example: hdfs:///usr/${username}"
- },
- "tf.parameter.services.num": {
- "name": "tf.parameter.services.num",
- "value": "1",
- "type": "number",
- "description": "Number of parameter services"
- },
- "tf.worker.services.num": {
- "name": "tf.worker.services.num",
- "value": "0",
- "type": "number",
- "description": "Number of worker services"
- },
- "tf.parameter.services.docker.image": {
- "name": "tf.parameter.services.docker.image",
- "value": "",
- "type": "string",
- "description": "Docker image of parameter services"
- },
- "tf.parameter.services.gpu": {
- "name": "tf.parameter.services.gpu",
- "value": "0",
- "type": "number",
- "description": "GPU number of parameter services"
- },
- "tf.parameter.services.cpu": {
- "name": "tf.parameter.services.cpu",
- "value": "2",
- "type": "number",
- "description": "CPU number of parameter services"
- },
- "tf.parameter.services.memory": {
- "name": "tf.parameter.services.memory",
- "value": "2G",
- "type": "string",
- "description": "Memory number of parameter services"
- },
- "tf.worker.services.docker.image": {
- "name": "tf.worker.services.docker.image",
- "value": "",
- "type": "string",
- "description": "Docker image of worker services"
- },
- "tf.worker.services.gpu": {
- "name": "tf.worker.services.gpu",
- "value": "0",
- "type": "number",
- "description": "GPU number of worker services"
- },
- "tf.worker.services.cpu": {
- "name": "tf.worker.services.cpu",
- "value": "2",
- "type": "number",
- "description": "CPU number of worker services"
- },
- "tf.worker.services.memory": {
- "name": "tf.worker.services.memory",
- "value": "4G",
- "type": "string",
- "description": "Memory number of worker services"
- },
- "tf.tensorboard.enable": {
- "name": "tf.tensorboard.enable",
- "value": true,
- "type": "checkbox",
- "description": "Whether to enable tensorboard"
- },
- "tf.checkpoint.path": {
- "name": "tf.checkpoint.path",
- "value": "",
- "type": "string",
- "description": "tensorflow checkpoint path"
- },
- "zeppelin.python": {
- "name": "zeppelin.python",
- "value": "python",
- "type": "string",
- "description": "Python directory. It is set to python by default.(assume python is in your $PATH)"
- },
- "zeppelin.python.maxResult": {
- "name": "zeppelin.python.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of dataframe rows to display."
- },
- "zeppelin.python.useIPython": {
- "name": "zeppelin.python.useIPython",
- "value": false,
- "type": "checkbox",
- "description": "whether use IPython when it is available"
- },
- "machinelearning.distributed.enable": {
- "name": "machinelearning.distributed.enable",
- "value": false,
- "type": "checkbox",
- "description": "Running distributed machine learning"
- },
- "zeppelin.ipython.launch.timeout": {
- "name": "zeppelin.ipython.launch.timeout",
- "value": "30000",
- "type": "number",
- "description": "time out for ipython launch"
- },
- "zeppelin.ipython.grpc.message_size": {
- "name": "zeppelin.ipython.grpc.message_size",
- "value": "33554432",
- "type": "number",
- "description": "grpc message size, default is 32M"
- },
- "shell.command.timeout.millisecs": {
- "name": "shell.command.timeout.millisecs",
- "value": "60000",
- "type": "number",
- "description": "Shell command time out in millisecs. Default \u003d 60000"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "submarine",
- "class": "org.apache.zeppelin.submarine.SubmarineInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- },
- "config": {
- "fontSize": 9.0,
- "colWidth": 12.0,
- "runOnSelectionChange": false,
- "title": true
- }
- },
- {
- "name": "python",
- "class": "org.apache.zeppelin.submarine.PySubmarineInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionSupport": true
- }
- },
- {
- "name": "ipython",
- "class": "org.apache.zeppelin.submarine.IPySubmarineInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "sh",
- "class": "org.apache.zeppelin.submarine.SubmarineShellInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "spark": {
- "id": "spark",
- "name": "spark",
- "group": "spark",
- "properties": {
- "SPARK_HOME": {
- "name": "SPARK_HOME",
- "value": "",
- "type": "string",
- "description": "Location of spark distribution"
- },
- "spark.master": {
- "name": "spark.master",
- "value": "local[*]",
- "type": "string",
- "description": "Spark master uri. local | yarn-client | yarn-cluster | spark master address of standalone mode, ex) spark://master_host:7077"
- },
- "spark.submit.deployMode": {
- "name": "spark.submit.deployMode",
- "value": "",
- "type": "string",
- "description": "The deploy mode of Spark driver program, either \"client\" or \"cluster\", Which means to launch driver program locally (\"client\") or remotely (\"cluster\") on one of the nodes inside the cluster."
- },
- "spark.app.name": {
- "name": "spark.app.name",
- "value": "",
- "type": "string",
- "description": "The name of spark application."
- },
- "spark.driver.cores": {
- "name": "spark.driver.cores",
- "value": "1",
- "type": "number",
- "description": "Number of cores to use for the driver process, only in cluster mode."
- },
- "spark.driver.memory": {
- "name": "spark.driver.memory",
- "value": "1g",
- "type": "string",
- "description": "Amount of memory to use for the driver process, i.e. where SparkContext is initialized, in the same format as JVM memory strings with a size unit suffix (\"k\", \"m\", \"g\" or \"t\") (e.g. 512m, 2g)."
- },
- "spark.executor.cores": {
- "name": "spark.executor.cores",
- "value": "1",
- "type": "number",
- "description": "The number of cores to use on each executor"
- },
- "spark.executor.memory": {
- "name": "spark.executor.memory",
- "value": "1g",
- "type": "string",
- "description": "Executor memory per worker instance. ex) 512m, 32g"
- },
- "spark.executor.instances": {
- "name": "spark.executor.instances",
- "value": "2",
- "type": "number",
- "description": "The number of executors for static allocation."
- },
- "spark.files": {
- "name": "spark.files",
- "value": "",
- "type": "string",
- "description": "Comma-separated list of files to be placed in the working directory of each executor. Globs are allowed."
- },
- "spark.jars": {
- "name": "spark.jars",
- "value": "",
- "type": "string",
- "description": "Comma-separated list of jars to include on the driver and executor classpaths. Globs are allowed."
- },
- "spark.jars.packages": {
- "name": "spark.jars.packages",
- "value": "",
- "type": "string",
- "description": "Comma-separated list of Maven coordinates of jars to include on the driver and executor classpaths. The coordinates should be groupId:artifactId:version. If spark.jars.ivySettings is given artifacts will be resolved according to the configuration in the file, otherwise artifacts will be searched for in the local maven repo, then maven central and finally any additional remote repositories given by the command-line option --repositories."
- },
- "zeppelin.spark.useHiveContext": {
- "name": "zeppelin.spark.useHiveContext",
- "value": true,
- "type": "checkbox",
- "description": "Use HiveContext instead of SQLContext if it is true. Enable hive for SparkSession."
- },
- "zeppelin.spark.run.asLoginUser": {
- "name": "zeppelin.spark.run.asLoginUser",
- "value": true,
- "type": "checkbox",
- "description": "Whether run spark job as the zeppelin login user, it is only applied when running spark job in hadoop yarn cluster and shiro is enabled"
- },
- "zeppelin.spark.printREPLOutput": {
- "name": "zeppelin.spark.printREPLOutput",
- "value": true,
- "type": "checkbox",
- "description": "Print REPL output"
- },
- "zeppelin.spark.maxResult": {
- "name": "zeppelin.spark.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of result to display."
- },
- "zeppelin.spark.enableSupportedVersionCheck": {
- "name": "zeppelin.spark.enableSupportedVersionCheck",
- "value": true,
- "type": "checkbox",
- "description": "Whether checking supported spark version. Developer only setting, not for production use"
- },
- "zeppelin.spark.uiWebUrl": {
- "name": "zeppelin.spark.uiWebUrl",
- "value": "",
- "type": "string",
- "description": "Override Spark UI default URL. In Kubernetes mode, value can be Jinja template string with 3 template variables \u0027PORT\u0027, \u0027SERVICE_NAME\u0027 and \u0027SERVICE_DOMAIN\u0027. (ex: http://{{PORT}}-{{SERVICE_NAME}}.{{SERVICE_DOMAIN}})"
- },
- "zeppelin.spark.ui.hidden": {
- "name": "zeppelin.spark.ui.hidden",
- "value": false,
- "type": "checkbox",
- "description": "Whether hide spark ui in zeppelin ui"
- },
- "spark.webui.yarn.useProxy": {
- "name": "spark.webui.yarn.useProxy",
- "value": false,
- "type": "checkbox",
- "description": "whether use yarn proxy url as spark weburl, e.g. http://localhost:8088/proxy/application_1583396598068_0004"
- },
- "zeppelin.spark.scala.color": {
- "name": "zeppelin.spark.scala.color",
- "value": true,
- "type": "checkbox",
- "description": "Whether enable color output of spark scala interpreter"
- },
- "zeppelin.spark.deprecatedMsg.show": {
- "name": "zeppelin.spark.deprecatedMsg.show",
- "value": true,
- "type": "checkbox",
- "description": "Whether show the spark deprecated message, spark 2.2 and before are deprecated. Zeppelin will display warning message by default"
- },
- "zeppelin.spark.concurrentSQL": {
- "name": "zeppelin.spark.concurrentSQL",
- "value": true,
- "type": "checkbox",
- "description": "Execute multiple SQL concurrently if set true."
- },
- "zeppelin.spark.concurrentSQL.max": {
- "name": "zeppelin.spark.concurrentSQL.max",
- "value": "10",
- "type": "number",
- "description": "Max number of SQL concurrently executed"
- },
- "zeppelin.spark.sql.stacktrace": {
- "name": "zeppelin.spark.sql.stacktrace",
- "value": true,
- "type": "checkbox",
- "description": "Show full exception stacktrace for SQL queries if set to true."
- },
- "zeppelin.spark.sql.interpolation": {
- "name": "zeppelin.spark.sql.interpolation",
- "value": false,
- "type": "checkbox",
- "description": "Enable ZeppelinContext variable interpolation into spark sql"
- },
- "PYSPARK_PYTHON": {
- "name": "PYSPARK_PYTHON",
- "value": "python",
- "type": "string",
- "description": "Python binary executable to use for PySpark in both driver and workers (default is python2.7 if available, otherwise python). Property `spark.pyspark.python` take precedence if it is set"
- },
- "PYSPARK_DRIVER_PYTHON": {
- "name": "PYSPARK_DRIVER_PYTHON",
- "value": "python",
- "type": "string",
- "description": "Python binary executable to use for PySpark in driver only (default is `PYSPARK_PYTHON`). Property `spark.pyspark.driver.python` take precedence if it is set"
- },
- "zeppelin.pyspark.useIPython": {
- "name": "zeppelin.pyspark.useIPython",
- "value": true,
- "type": "checkbox",
- "description": "Whether use IPython when it is available"
- },
- "zeppelin.R.knitr": {
- "name": "zeppelin.R.knitr",
- "value": true,
- "type": "checkbox",
- "description": "Whether use knitr or not"
- },
- "zeppelin.R.cmd": {
- "name": "zeppelin.R.cmd",
- "value": "R",
- "type": "string",
- "description": "R binary executable path"
- },
- "zeppelin.R.image.width": {
- "name": "zeppelin.R.image.width",
- "value": "100%",
- "type": "number",
- "description": "Image width of R plotting"
- },
- "zeppelin.R.render.options": {
- "name": "zeppelin.R.render.options",
- "value": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F, fig.retina \u003d 2",
- "type": "textarea",
- "description": ""
- },
- "zeppelin.R.shiny.portRange": {
- "name": "zeppelin.R.shiny.portRange",
- "value": ":",
- "type": "string",
- "description": "Shiny app would launch a web app at some port, this property is to specify the portRange via format \u0027\u003cstart\u003e:\u003cend\u003e\u0027, e.g. \u00275000:5001\u0027. By default it is \u0027:\u0027 which means any port"
- },
- "zeppelin.kotlin.shortenTypes": {
- "name": "zeppelin.kotlin.shortenTypes",
- "value": true,
- "type": "checkbox",
- "description": "Show short types instead of full, e.g. List\u003cString\u003e or kotlin.collections.List\u003ckotlin.String\u003e"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "spark",
- "class": "org.apache.zeppelin.spark.SparkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "scala",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "sql",
- "class": "org.apache.zeppelin.spark.SparkSqlInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "pyspark",
- "class": "org.apache.zeppelin.spark.PySparkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "ipyspark",
- "class": "org.apache.zeppelin.spark.IPySparkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionSupport": true,
- "completionKey": "TAB"
- }
- },
- {
- "name": "r",
- "class": "org.apache.zeppelin.spark.SparkRInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionSupport": false,
- "completionKey": "TAB"
- }
- },
- {
- "name": "ir",
- "class": "org.apache.zeppelin.spark.SparkIRInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionSupport": true,
- "completionKey": "TAB"
- }
- },
- {
- "name": "shiny",
- "class": "org.apache.zeppelin.spark.SparkShinyInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionSupport": true,
- "completionKey": "TAB"
- }
- },
- {
- "name": "kotlin",
- "class": "org.apache.zeppelin.spark.KotlinSparkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "kotlin",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "sh": {
- "id": "sh",
- "name": "sh",
- "group": "sh",
- "properties": {
- "shell.command.timeout.millisecs": {
- "name": "shell.command.timeout.millisecs",
- "value": "60000",
- "type": "number",
- "description": "Shell command time out in millisecs. Default \u003d 60000"
- },
- "shell.command.timeout.check.interval": {
- "name": "shell.command.timeout.check.interval",
- "value": "60000",
- "type": "number",
- "description": "Shell command output check interval in millisecs. Default \u003d 10000"
- },
- "shell.working.directory.user.home": {
- "name": "shell.working.directory.user.home",
- "value": false,
- "type": "checkbox",
- "description": "If this set to true, the shell\u0027s working directory will be set to user home"
- },
- "zeppelin.shell.auth.type": {
- "name": "zeppelin.shell.auth.type",
- "value": "",
- "type": "string",
- "description": "If auth type is needed, Example: KERBEROS"
- },
- "zeppelin.shell.keytab.location": {
- "name": "zeppelin.shell.keytab.location",
- "value": "",
- "type": "string",
- "description": "Kerberos keytab location"
- },
- "zeppelin.shell.principal": {
- "name": "zeppelin.shell.principal",
- "value": "",
- "type": "string",
- "description": "Kerberos principal"
- },
- "zeppelin.shell.interpolation": {
- "name": "zeppelin.shell.interpolation",
- "value": false,
- "type": "checkbox",
- "description": "Enable ZeppelinContext variable interpolation into paragraph text"
- },
- "zeppelin.concurrency.max": {
- "name": "zeppelin.concurrency.max",
- "value": "10",
- "type": "number",
- "description": "Max concurrency of shell interpreter"
- },
- "zeppelin.terminal.ip.mapping": {
- "name": "zeppelin.terminal.ip.mapping",
- "value": "",
- "type": "string",
- "description": "Internal and external IP mapping of zeppelin server"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "sh",
- "class": "org.apache.zeppelin.shell.ShellInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- }
- },
- {
- "name": "terminal",
- "class": "org.apache.zeppelin.shell.TerminalInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- },
- "config": {
- "checkEmpty": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "md": {
- "id": "md",
- "name": "md",
- "group": "md",
- "properties": {
- "markdown.parser.type": {
- "name": "markdown.parser.type",
- "value": "flexmark",
- "type": "string",
- "description": "Markdown Parser Type. Available values: pegdown, markdown4j, flexmark. Default \u003d flexmark"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "md",
- "class": "org.apache.zeppelin.markdown.Markdown",
- "defaultInterpreter": false,
- "editor": {
- "language": "markdown",
- "editOnDblClick": true,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "hazelcastjet": {
- "id": "hazelcastjet",
- "name": "hazelcastjet",
- "group": "hazelcastjet",
- "properties": {},
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "hazelcastjet",
- "class": "org.apache.zeppelin.hazelcastjet.HazelcastJetInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "java",
- "editOnDblClick": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "alluxio": {
- "id": "alluxio",
- "name": "alluxio",
- "group": "alluxio",
- "properties": {
- "alluxio.master.hostname": {
- "name": "alluxio.master.hostname",
- "value": "localhost",
- "type": "string",
- "description": "Alluxio master hostname"
- },
- "alluxio.master.port": {
- "name": "alluxio.master.port",
- "value": "19998",
- "type": "number",
- "description": "Alluxio master port"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "alluxio",
- "class": "org.apache.zeppelin.alluxio.AlluxioInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "bigquery": {
- "id": "bigquery",
- "name": "bigquery",
- "group": "bigquery",
- "properties": {
- "zeppelin.bigquery.project_id": {
- "name": "zeppelin.bigquery.project_id",
- "value": " ",
- "type": "string",
- "description": "Google Project ID"
- },
- "zeppelin.bigquery.wait_time": {
- "name": "zeppelin.bigquery.wait_time",
- "value": "5000",
- "type": "number",
- "description": "Query timeout in Milliseconds"
- },
- "zeppelin.bigquery.max_no_of_rows": {
- "name": "zeppelin.bigquery.max_no_of_rows",
- "value": "100000",
- "type": "number",
- "description": "Maximum number of rows to fetch from BigQuery"
- },
- "zeppelin.bigquery.sql_dialect": {
- "name": "zeppelin.bigquery.sql_dialect",
- "value": "",
- "type": "string",
- "description": "BigQuery SQL dialect (standardSQL or legacySQL). If empty, query prefix like \u0027#standardSQL\u0027 can be used."
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "sql",
- "class": "org.apache.zeppelin.bigquery.BigQueryInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "mongodb": {
- "id": "mongodb",
- "name": "mongodb",
- "group": "mongodb",
- "properties": {
- "mongo.shell.path": {
- "name": "mongo.shell.path",
- "value": "mongo",
- "type": "string",
- "description": "MongoDB shell local path"
- },
- "mongo.shell.command.table.limit": {
- "name": "mongo.shell.command.table.limit",
- "value": "1000",
- "type": "number",
- "description": "Limit of documents displayed in a table"
- },
- "mongo.shell.command.timeout": {
- "name": "mongo.shell.command.timeout",
- "value": "60000",
- "type": "number",
- "description": "MongoDB shell command timeout"
- },
- "mongo.server.host": {
- "name": "mongo.server.host",
- "value": "localhost",
- "type": "string",
- "description": "MongoDB server host to connect to"
- },
- "mongo.server.port": {
- "name": "mongo.server.port",
- "value": "27017",
- "type": "number",
- "description": "MongoDB server port to connect to"
- },
- "mongo.server.database": {
- "name": "mongo.server.database",
- "value": "test",
- "type": "string",
- "description": "MongoDB database name"
- },
- "mongo.server.authenticationDatabase": {
- "name": "mongo.server.authenticationDatabase",
- "value": "",
- "type": "string",
- "description": "MongoDB database name for authentication"
- },
- "mongo.server.username": {
- "name": "mongo.server.username",
- "value": "",
- "type": "string",
- "description": "Username for authentication"
- },
- "mongo.server.password": {
- "name": "mongo.server.password",
- "value": "",
- "type": "password",
- "description": "Password for authentication"
- },
- "mongo.interpreter.concurrency.max": {
- "name": "mongo.interpreter.concurrency.max",
- "value": "10",
- "type": "number",
- "description": "Max count of scheduler concurrency"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "mongodb",
- "class": "org.apache.zeppelin.mongodb.MongoDbInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "javascript",
- "editOnDblClick": false,
- "completionKey": "TAB"
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "hbase": {
- "id": "hbase",
- "name": "hbase",
- "group": "hbase",
- "properties": {
- "hbase.home": {
- "name": "hbase.home",
- "value": "/usr/lib/hbase/",
- "type": "string",
- "description": "Installation directory of HBase"
- },
- "hbase.ruby.sources": {
- "name": "hbase.ruby.sources",
- "value": "lib/ruby",
- "type": "string",
- "description": "Path to Ruby scripts relative to \u0027hbase.home\u0027"
- },
- "zeppelin.hbase.test.mode": {
- "name": "zeppelin.hbase.test.mode",
- "value": false,
- "type": "checkbox",
- "description": "Disable checks for unit and manual tests"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "hbase",
- "class": "org.apache.zeppelin.hbase.HbaseInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "python": {
- "id": "python",
- "name": "python",
- "group": "python",
- "properties": {
- "zeppelin.python": {
- "name": "zeppelin.python",
- "value": "python",
- "type": "string",
- "description": "Python binary executable path. It is set to python by default.(assume python is in your $PATH)"
- },
- "zeppelin.python.maxResult": {
- "name": "zeppelin.python.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of dataframe rows to display."
- },
- "zeppelin.python.useIPython": {
- "name": "zeppelin.python.useIPython",
- "value": true,
- "type": "checkbox",
- "description": "Whether use IPython when it is available in `%python`"
- },
- "zeppelin.ipython.launch.timeout": {
- "name": "zeppelin.ipython.launch.timeout",
- "value": "30000",
- "type": "number",
- "description": "Time out for ipython launch"
- },
- "zeppelin.ipython.grpc.message_size": {
- "name": "zeppelin.ipython.grpc.message_size",
- "value": "33554432",
- "type": "number",
- "description": "grpc message size, default is 32M"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "python",
- "class": "org.apache.zeppelin.python.PythonInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionSupport": true
- }
- },
- {
- "name": "ipython",
- "class": "org.apache.zeppelin.python.IPythonInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "sql",
- "class": "org.apache.zeppelin.python.PythonInterpreterPandasSql",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": false
- }
- },
- {
- "name": "conda",
- "class": "org.apache.zeppelin.python.PythonCondaInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- }
- },
- {
- "name": "docker",
- "class": "org.apache.zeppelin.python.PythonDockerInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "sap": {
- "id": "sap",
- "name": "sap",
- "group": "sap",
- "properties": {
- "universe.api.url": {
- "name": "universe.api.url",
- "value": "http://localhost:6405/biprws",
- "type": "url",
- "description": "API url of Universe"
- },
- "universe.user": {
- "name": "universe.user",
- "value": "",
- "type": "string",
- "description": "Username for API of Universe"
- },
- "universe.password": {
- "name": "universe.password",
- "value": "",
- "type": "password",
- "description": "Password for API of Universe"
- },
- "universe.authType": {
- "name": "universe.authType",
- "value": "secEnterprise",
- "type": "string",
- "description": "Type of authentication for API of Universe. Available values: secEnterprise, secLDAP, secWinAD, secSAPR3"
- },
- "universe.queryTimeout": {
- "name": "universe.queryTimeout",
- "value": "7200000",
- "type": "number",
- "description": "Query timeout for API of Universe"
- },
- "universe.interpolation": {
- "name": "universe.interpolation",
- "value": false,
- "type": "checkbox",
- "description": "Enable ZeppelinContext variable interpolation into paragraph text"
- },
- "universe.concurrent.use": {
- "name": "universe.concurrent.use",
- "value": true,
- "type": "checkbox",
- "description": "Use parallel scheduler"
- },
- "universe.concurrent.maxConnection": {
- "name": "universe.concurrent.maxConnection",
- "value": "10",
- "type": "number",
- "description": "Number of concurrent execution"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "universe",
- "class": "org.apache.zeppelin.sap.UniverseInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "kotlin": {
- "id": "kotlin",
- "name": "kotlin",
- "group": "kotlin",
- "properties": {
- "zeppelin.kotlin.maxResult": {
- "name": "zeppelin.kotlin.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of dataframe rows to display."
- },
- "zeppelin.kotlin.shortenTypes": {
- "name": "zeppelin.kotlin.shortenTypes",
- "value": true,
- "type": "checkbox",
- "description": "Show short types instead of full, e.g. List\u003cString\u003e or kotlin.collections.List\u003ckotlin.String\u003e"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "kotlin",
- "class": "org.apache.zeppelin.kotlin.KotlinInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "kotlin",
- "editOnDblClick": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "flink-cmd": {
- "id": "flink-cmd",
- "name": "flink-cmd",
- "group": "flink-cmd",
- "properties": {
- "FLINK_HOME": {
- "name": "FLINK_HOME",
- "value": "",
- "type": "string",
- "description": "Location of flink distribution"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "cmd",
- "class": "org.apache.zeppelin.flink.cmd.FlinkCmdInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sh",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "scalding": {
- "id": "scalding",
- "name": "scalding",
- "group": "scalding",
- "properties": {
- "args.string": {
- "name": "args.string",
- "value": "--local --repl",
- "type": "textarea",
- "description": "Arguments for scalding REPL"
- },
- "max.open.instances": {
- "name": "max.open.instances",
- "value": "50",
- "type": "number",
- "description": "Maximum number of open interpreter instances"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "scalding",
- "class": "org.apache.zeppelin.scalding.ScaldingInterpreter",
- "defaultInterpreter": false
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "flink": {
- "id": "flink",
- "name": "flink",
- "group": "flink",
- "properties": {
- "FLINK_HOME": {
- "name": "FLINK_HOME",
- "value": "",
- "type": "string",
- "description": "Location of flink distribution"
- },
- "HADOOP_CONF_DIR": {
- "name": "HADOOP_CONF_DIR",
- "value": "",
- "type": "string",
- "description": "Location of hadoop conf (core-site.xml, hdfs-site.xml and etc.)"
- },
- "HIVE_CONF_DIR": {
- "name": "HIVE_CONF_DIR",
- "value": "",
- "type": "string",
- "description": "Location of hive conf (hive-site.xml)"
- },
- "flink.execution.mode": {
- "name": "flink.execution.mode",
- "value": "local",
- "type": "string",
- "description": "Execution mode, it could be local|remote|yarn"
- },
- "flink.execution.remote.host": {
- "name": "flink.execution.remote.host",
- "value": "",
- "type": "string",
- "description": "Host name of running JobManager. Only used for remote mode"
- },
- "flink.execution.remote.port": {
- "name": "flink.execution.remote.port",
- "value": "",
- "type": "number",
- "description": "Port of running JobManager. Only used for remote mode"
- },
- "jobmanager.memory.process.size": {
- "name": "jobmanager.memory.process.size",
- "value": "1024m",
- "type": "text",
- "description": "Memory for JobManager, e.g. 1024m"
- },
- "taskmanager.memory.process.size": {
- "name": "taskmanager.memory.process.size",
- "value": "1024m",
- "type": "text",
- "description": "Memory for TaskManager, e.g. 1024m"
- },
- "taskmanager.numberOfTaskSlots": {
- "name": "taskmanager.numberOfTaskSlots",
- "value": "1",
- "type": "number",
- "description": "Number of slot per TaskManager"
- },
- "local.number-taskmanager": {
- "name": "local.number-taskmanager",
- "value": "4",
- "type": "number",
- "description": "Number of TaskManager in local mode"
- },
- "yarn.application.name": {
- "name": "yarn.application.name",
- "value": "Zeppelin Flink Session",
- "type": "string",
- "description": "Yarn app name"
- },
- "yarn.application.queue": {
- "name": "yarn.application.queue",
- "value": "default",
- "type": "string",
- "description": "Yarn queue name"
- },
- "zeppelin.flink.uiWebUrl": {
- "name": "zeppelin.flink.uiWebUrl",
- "value": "",
- "type": "string",
- "description": "User specified Flink JobManager url, it could be used in remote mode where Flink cluster is already started, or could be used as url template, e.g. https://knox-server:8443/gateway/cluster-topo/yarn/proxy/{{applicationId}}/ where {{applicationId}} would be replaced with yarn app id"
- },
- "zeppelin.flink.run.asLoginUser": {
- "name": "zeppelin.flink.run.asLoginUser",
- "value": true,
- "type": "checkbox",
- "description": "Whether run flink job as the zeppelin login user, it is only applied when running flink job in hadoop yarn cluster and shiro is enabled"
- },
- "flink.udf.jars": {
- "name": "flink.udf.jars",
- "value": "",
- "type": "string",
- "description": "Flink udf jars (comma separated), Zeppelin will register udfs in this jar for user automatically, these udf jars could be either local files or hdfs files if you have hadoop installed, the udf name is the class name"
- },
- "flink.udf.jars.packages": {
- "name": "flink.udf.jars.packages",
- "value": "",
- "type": "string",
- "description": "Packages (comma separated) that would be searched for the udf defined in `flink.udf.jars`"
- },
- "flink.execution.jars": {
- "name": "flink.execution.jars",
- "value": "",
- "type": "string",
- "description": "Additional user jars (comma separated), these jars could be either local files or hdfs files if you have hadoop installed"
- },
- "flink.execution.packages": {
- "name": "flink.execution.packages",
- "value": "",
- "type": "string",
- "description": "Additional user packages (comma separated), e.g. flink connector packages"
- },
- "zeppelin.flink.scala.color": {
- "name": "zeppelin.flink.scala.color",
- "value": true,
- "type": "checkbox",
- "description": "Whether display scala shell output in colorful format"
- },
- "zeppelin.flink.enableHive": {
- "name": "zeppelin.flink.enableHive",
- "value": false,
- "type": "checkbox",
- "description": "Whether enable hive"
- },
- "zeppelin.flink.hive.version": {
- "name": "zeppelin.flink.hive.version",
- "value": "2.3.4",
- "type": "string",
- "description": "Hive version that you would like to connect"
- },
- "zeppelin.flink.module.enableHive": {
- "name": "zeppelin.flink.module.enableHive",
- "value": false,
- "type": "checkbox",
- "description": "Whether enable hive module, hive udf take precedence over flink udf if hive module is enabled."
- },
- "zeppelin.flink.printREPLOutput": {
- "name": "zeppelin.flink.printREPLOutput",
- "value": true,
- "type": "checkbox",
- "description": "Print REPL output"
- },
- "zeppelin.flink.maxResult": {
- "name": "zeppelin.flink.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of rows returned by sql interpreter."
- },
- "zeppelin.pyflink.python": {
- "name": "zeppelin.pyflink.python",
- "value": "python",
- "type": "string",
- "description": "Python executable for pyflink"
- },
- "flink.interpreter.close.shutdown_cluster": {
- "name": "flink.interpreter.close.shutdown_cluster",
- "value": true,
- "type": "checkbox",
- "description": "Whether shutdown flink cluster when close interpreter"
- },
- "zeppelin.interpreter.close.cancel_job": {
- "name": "zeppelin.interpreter.close.cancel_job",
- "value": true,
- "type": "checkbox",
- "description": "Whether cancel flink job when closing interpreter"
- },
- "zeppelin.flink.job.check_interval": {
- "name": "zeppelin.flink.job.check_interval",
- "value": "1000",
- "type": "number",
- "description": "Check interval (in milliseconds) to check flink job progress"
- },
- "zeppelin.flink.concurrentBatchSql.max": {
- "name": "zeppelin.flink.concurrentBatchSql.max",
- "value": "10",
- "type": "number",
- "description": "Max concurrent sql of Batch Sql"
- },
- "zeppelin.flink.concurrentStreamSql.max": {
- "name": "zeppelin.flink.concurrentStreamSql.max",
- "value": "10",
- "type": "number",
- "description": "Max concurrent sql of Stream Sql"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "flink",
- "class": "org.apache.zeppelin.flink.FlinkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "scala",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "bsql",
- "class": "org.apache.zeppelin.flink.FlinkBatchSqlInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false
- }
- },
- {
- "name": "ssql",
- "class": "org.apache.zeppelin.flink.FlinkStreamSqlInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false
- }
- },
- {
- "name": "pyflink",
- "class": "org.apache.zeppelin.flink.PyFlinkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "ipyflink",
- "class": "org.apache.zeppelin.flink.IPyFlinkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "angular": {
- "id": "angular",
- "name": "angular",
- "group": "angular",
- "properties": {},
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "angular",
- "class": "org.apache.zeppelin.angular.AngularInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": true,
- "completionSupport": false
- }
- },
- {
- "name": "ng",
- "class": "org.apache.zeppelin.angular.AngularInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": true,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "ksql": {
- "id": "ksql",
- "name": "ksql",
- "group": "ksql",
- "properties": {
- "ksql.url": {
- "name": "ksql.url",
- "value": "http://localhost:8088",
- "type": "string",
- "description": "KSQL Endpoint base URL"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "ksql",
- "class": "org.apache.zeppelin.ksql.KSQLInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "r": {
- "id": "r",
- "name": "r",
- "group": "r",
- "properties": {
- "zeppelin.R.knitr": {
- "name": "zeppelin.R.knitr",
- "value": true,
- "type": "checkbox",
- "description": "Whether use knitr or not"
- },
- "zeppelin.R.cmd": {
- "name": "zeppelin.R.cmd",
- "value": "R",
- "type": "string",
- "description": "R binary executable path"
- },
- "zeppelin.R.maxResult": {
- "name": "zeppelin.R.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of dataframe rows to display."
- },
- "zeppelin.R.image.width": {
- "name": "zeppelin.R.image.width",
- "value": "100%",
- "type": "number",
- "description": "Image width of R plotting"
- },
- "zeppelin.R.render.options": {
- "name": "zeppelin.R.render.options",
- "value": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F, fig.retina \u003d 2",
- "type": "textarea",
- "description": ""
- },
- "zeppelin.R.shiny.portRange": {
- "name": "zeppelin.R.shiny.portRange",
- "value": ":",
- "type": "string",
- "description": "Shiny app would launch a web app at some port, this property is to specify the portRange via format \u0027\u003cstart\u003e:\u003cend\u003e\u0027, e.g. \u00275000:5001\u0027. By default it is \u0027:\u0027 which means any port"
- },
- "zeppelin.R.shiny.iframe_width": {
- "name": "zeppelin.R.shiny.iframe_width",
- "value": "100%",
- "type": "text",
- "description": "Width of iframe of R shiny app"
- },
- "zeppelin.R.shiny.iframe_height": {
- "name": "zeppelin.R.shiny.iframe_height",
- "value": "500px",
- "type": "text",
- "description": "Height of iframe of R shiny app"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "r",
- "class": "org.apache.zeppelin.r.RInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionSupport": true
- }
- },
- {
- "name": "ir",
- "class": "org.apache.zeppelin.r.IRInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionSupport": true
- }
- },
- {
- "name": "shiny",
- "class": "org.apache.zeppelin.r.ShinyInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "livy": {
- "id": "livy",
- "name": "livy",
- "group": "livy",
- "properties": {
- "zeppelin.livy.url": {
- "name": "zeppelin.livy.url",
- "value": "http://localhost:8998",
- "type": "url",
- "description": "The URL for Livy Server."
- },
- "zeppelin.livy.session.create_timeout": {
- "name": "zeppelin.livy.session.create_timeout",
- "value": "120",
- "type": "number",
- "description": "Livy Server create session timeout (seconds)."
- },
- "livy.spark.driver.cores": {
- "name": "livy.spark.driver.cores",
- "value": "",
- "type": "number",
- "description": "Driver cores. ex) 1, 2"
- },
- "livy.spark.driver.memory": {
- "name": "livy.spark.driver.memory",
- "value": "",
- "type": "string",
- "description": "Driver memory. ex) 512m, 32g"
- },
- "livy.spark.executor.instances": {
- "name": "livy.spark.executor.instances",
- "value": "",
- "type": "number",
- "description": "Executor instances. ex) 1, 4"
- },
- "livy.spark.executor.cores": {
- "name": "livy.spark.executor.cores",
- "value": "",
- "type": "number",
- "description": "Num cores per executor. ex) 1, 4"
- },
- "livy.spark.executor.memory": {
- "name": "livy.spark.executor.memory",
- "value": "",
- "type": "string",
- "description": "Executor memory per worker instance. ex) 512m, 32g"
- },
- "livy.spark.dynamicAllocation.enabled": {
- "name": "livy.spark.dynamicAllocation.enabled",
- "value": false,
- "type": "checkbox",
- "description": "Use dynamic resource allocation"
- },
- "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": {
- "name": "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout",
- "value": "",
- "type": "string",
- "description": "Remove an executor which has cached data blocks"
- },
- "livy.spark.dynamicAllocation.minExecutors": {
- "name": "livy.spark.dynamicAllocation.minExecutors",
- "value": "",
- "type": "number",
- "description": "Lower bound for the number of executors if dynamic allocation is enabled."
- },
- "livy.spark.dynamicAllocation.initialExecutors": {
- "name": "livy.spark.dynamicAllocation.initialExecutors",
- "value": "",
- "type": "number",
- "description": "Initial number of executors to run if dynamic allocation is enabled."
- },
- "livy.spark.dynamicAllocation.maxExecutors": {
- "name": "livy.spark.dynamicAllocation.maxExecutors",
- "value": "",
- "type": "number",
- "description": "Upper bound for the number of executors if dynamic allocation is enabled."
- },
- "zeppelin.livy.principal": {
- "name": "zeppelin.livy.principal",
- "value": "",
- "type": "string",
- "description": "Kerberos principal to authenticate livy"
- },
- "zeppelin.livy.keytab": {
- "name": "zeppelin.livy.keytab",
- "value": "",
- "type": "textarea",
- "description": "Kerberos keytab to authenticate livy"
- },
- "zeppelin.livy.pull_status.interval.millis": {
- "name": "zeppelin.livy.pull_status.interval.millis",
- "value": "1000",
- "type": "number",
- "description": "The interval for checking paragraph execution status"
- },
- "zeppelin.livy.maxLogLines": {
- "name": "zeppelin.livy.maxLogLines",
- "value": "1000",
- "type": "number",
- "description": "Max number of lines of logs"
- },
- "livy.spark.jars.packages": {
- "name": "livy.spark.jars.packages",
- "value": "",
- "type": "textarea",
- "description": "Adding extra libraries to livy interpreter"
- },
- "zeppelin.livy.displayAppInfo": {
- "name": "zeppelin.livy.displayAppInfo",
- "value": true,
- "type": "checkbox",
- "description": "Whether display app info"
- },
- "zeppelin.livy.restart_dead_session": {
- "name": "zeppelin.livy.restart_dead_session",
- "value": false,
- "type": "checkbox",
- "description": "Whether restart a dead session"
- },
- "zeppelin.livy.spark.sql.maxResult": {
- "name": "zeppelin.livy.spark.sql.maxResult",
- "value": "1000",
- "type": "number",
- "description": "Max number of Spark SQL result to display."
- },
- "zeppelin.livy.spark.sql.field.truncate": {
- "name": "zeppelin.livy.spark.sql.field.truncate",
- "value": true,
- "type": "checkbox",
- "description": "If true, truncate field values longer than 20 characters."
- },
- "zeppelin.livy.concurrentSQL": {
- "name": "zeppelin.livy.concurrentSQL",
- "value": false,
- "type": "checkbox",
- "description": "Execute multiple SQL concurrently if set true."
- },
- "zeppelin.livy.tableWithUTFCharacter": {
- "name": "zeppelin.livy.tableWithUTFCharacter",
- "value": false,
- "type": "checkbox",
- "description": "If database contains UTF characters then set this as true."
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "spark",
- "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "scala",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "sql",
- "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "pyspark",
- "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "pyspark3",
- "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "python",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "sparkr",
- "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "r",
- "editOnDblClick": false,
- "completionKey": "TAB",
- "completionSupport": true
- }
- },
- {
- "name": "shared",
- "class": "org.apache.zeppelin.livy.LivySharedInterpreter",
- "defaultInterpreter": false
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "perNote": "shared",
- "perUser": "scoped",
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "neo4j": {
- "id": "neo4j",
- "name": "neo4j",
- "group": "neo4j",
- "properties": {
- "neo4j.url": {
- "name": "neo4j.url",
- "value": "bolt://localhost:7687",
- "type": "string",
- "description": "The Neo4j\u0027s BOLT url."
- },
- "neo4j.database": {
- "name": "neo4j.database",
- "value": "",
- "type": "string",
- "description": "The Neo4j target database, if empty use the dafault db."
- },
- "neo4j.multi.statement": {
- "name": "neo4j.multi.statement",
- "value": "true",
- "type": "string",
- "description": "Enables the multi statement management, if true it computes multiple queries separated by semicolon."
- },
- "neo4j.auth.type": {
- "name": "neo4j.auth.type",
- "value": "BASIC",
- "type": "string",
- "description": "The Neo4j\u0027s authentication type (NONE, BASIC)."
- },
- "neo4j.auth.user": {
- "name": "neo4j.auth.user",
- "value": "",
- "type": "string",
- "description": "The Neo4j user name."
- },
- "neo4j.auth.password": {
- "name": "neo4j.auth.password",
- "value": "",
- "type": "string",
- "description": "The Neo4j user password."
- },
- "neo4j.max.concurrency": {
- "name": "neo4j.max.concurrency",
- "value": "50",
- "type": "string",
- "description": "Max concurrency call from Zeppelin to Neo4j server."
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "neo4j",
- "class": "org.apache.zeppelin.graph.neo4j.Neo4jCypherInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "groovy": {
- "id": "groovy",
- "name": "groovy",
- "group": "groovy",
- "properties": {
- "GROOVY_CLASSES": {
- "name": "GROOVY_CLASSES",
- "value": "",
- "type": "textarea",
- "description": "The path for custom groovy classes location. If empty `./interpreter/groovy/classes`"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "groovy",
- "class": "org.apache.zeppelin.groovy.GroovyInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false,
- "completionSupport": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "cassandra": {
- "id": "cassandra",
- "name": "cassandra",
- "group": "cassandra",
- "properties": {
- "cassandra.hosts": {
- "name": "cassandra.hosts",
- "value": "localhost",
- "type": "textarea",
- "description": "Comma separated Cassandra hosts (DNS name or IP address). Default \u003d localhost. Ex: \u0027192.168.0.12,node2,node3\u0027"
- },
- "cassandra.native.port": {
- "name": "cassandra.native.port",
- "value": "9042",
- "type": "number",
- "description": "Cassandra native port. Default \u003d 9042"
- },
- "cassandra.protocol.version": {
- "name": "cassandra.protocol.version",
- "value": "DEFAULT",
- "type": "string",
- "description": "Cassandra protocol version. Default \u003d auto-detect"
- },
- "cassandra.cluster": {
- "name": "cassandra.cluster",
- "value": "Test Cluster",
- "type": "string",
- "description": "Cassandra cluster name. Default \u003d \u0027Test Cluster\u0027"
- },
- "cassandra.keyspace": {
- "name": "cassandra.keyspace",
- "value": "system",
- "type": "string",
- "description": "Cassandra keyspace name. Default \u003d \u0027system\u0027"
- },
- "cassandra.compression.protocol": {
- "name": "cassandra.compression.protocol",
- "value": "NONE",
- "type": "string",
- "description": "Cassandra compression protocol. Available values: NONE, SNAPPY, LZ4. Default \u003d NONE"
- },
- "cassandra.credentials.username": {
- "name": "cassandra.credentials.username",
- "value": "none",
- "type": "string",
- "description": "Cassandra credentials username. Default \u003d \u0027none\u0027"
- },
- "cassandra.credentials.password": {
- "name": "cassandra.credentials.password",
- "value": "none",
- "type": "password",
- "description": "Cassandra credentials password. Default \u003d \u0027none\u0027"
- },
- "cassandra.load.balancing.policy": {
- "name": "cassandra.load.balancing.policy",
- "value": "DEFAULT",
- "type": "string",
- "description": "Class name for Load Balancing Policy. Default \u003d DefaultLoadBalancingPolicy"
- },
- "cassandra.retry.policy": {
- "name": "cassandra.retry.policy",
- "value": "DEFAULT",
- "type": "string",
- "description": "Class name for Retry Policy. Default \u003d DefaultRetryPolicy"
- },
- "cassandra.reconnection.policy": {
- "name": "cassandra.reconnection.policy",
- "value": "DEFAULT",
- "type": "string",
- "description": "Class name for Reconnection Policy. Default \u003d ExponentialReconnectionPolicy"
- },
- "cassandra.speculative.execution.policy": {
- "name": "cassandra.speculative.execution.policy",
- "value": "DEFAULT",
- "type": "string",
- "description": "Class name for Speculative Execution Policy. Default \u003d NoSpeculativeExecutionPolicy"
- },
- "cassandra.interpreter.parallelism": {
- "name": "cassandra.interpreter.parallelism",
- "value": "10",
- "type": "number",
- "description": "Cassandra interpreter parallelism.Default \u003d 10"
- },
- "cassandra.max.schema.agreement.wait.second": {
- "name": "cassandra.max.schema.agreement.wait.second",
- "value": "10",
- "type": "number",
- "description": "Cassandra max schema agreement wait in second.Default \u003d ProtocolOptions.DEFAULT_MAX_SCHEMA_AGREEMENT_WAIT_SECONDS"
- },
- "cassandra.pooling.connection.per.host.local": {
- "name": "cassandra.pooling.connection.per.host.local",
- "value": "1",
- "type": "number",
- "description": "Cassandra connections per host local. Protocol V3 and above default \u003d 1"
- },
- "cassandra.pooling.connection.per.host.remote": {
- "name": "cassandra.pooling.connection.per.host.remote",
- "value": "1",
- "type": "number",
- "description": "Cassandra connections per host remote. Protocol V3 and above default \u003d 1"
- },
- "cassandra.pooling.max.request.per.connection": {
- "name": "cassandra.pooling.max.request.per.connection",
- "value": "1024",
- "type": "number",
- "description": "Cassandra max requests per connection. Protocol V3 and above default \u003d 1024"
- },
- "cassandra.pooling.pool.timeout.millisecs": {
- "name": "cassandra.pooling.pool.timeout.millisecs",
- "value": "5000",
- "type": "number",
- "description": "Cassandra pool time out in millisecs. Default \u003d 5000"
- },
- "cassandra.pooling.heartbeat.interval.seconds": {
- "name": "cassandra.pooling.heartbeat.interval.seconds",
- "value": "30",
- "type": "number",
- "description": "Cassandra pool heartbeat interval in secs. Default \u003d 30"
- },
- "cassandra.query.default.consistency": {
- "name": "cassandra.query.default.consistency",
- "value": "ONE",
- "type": "string",
- "description": "Cassandra query default consistency level. Default \u003d ONE"
- },
- "cassandra.query.default.serial.consistency": {
- "name": "cassandra.query.default.serial.consistency",
- "value": "SERIAL",
- "type": "string",
- "description": "Cassandra query default serial consistency level. Default \u003d SERIAL"
- },
- "cassandra.query.default.fetchSize": {
- "name": "cassandra.query.default.fetchSize",
- "value": "5000",
- "type": "number",
- "description": "Cassandra query default fetch size. Default \u003d 5000"
- },
- "cassandra.socket.connection.timeout.millisecs": {
- "name": "cassandra.socket.connection.timeout.millisecs",
- "value": "5000",
- "type": "number",
- "description": "Cassandra socket default connection timeout in millisecs. Default \u003d 5000"
- },
- "cassandra.socket.read.timeout.millisecs": {
- "name": "cassandra.socket.read.timeout.millisecs",
- "value": "12000",
- "type": "number",
- "description": "Cassandra socket read timeout in millisecs. Default \u003d 12000"
- },
- "cassandra.socket.tcp.no_delay": {
- "name": "cassandra.socket.tcp.no_delay",
- "value": true,
- "type": "checkbox",
- "description": "Cassandra socket TCP no delay. Default \u003d true"
- },
- "cassandra.ssl.enabled": {
- "name": "cassandra.ssl.enabled",
- "value": false,
- "type": "checkbox",
- "description": "Cassandra SSL"
- },
- "cassandra.ssl.truststore.path": {
- "name": "cassandra.ssl.truststore.path",
- "value": "none",
- "type": "string",
- "description": "Cassandra truststore path. Default \u003d none"
- },
- "cassandra.ssl.truststore.password": {
- "name": "cassandra.ssl.truststore.password",
- "value": "none",
- "type": "password",
- "description": "Cassandra truststore password. Default \u003d none"
- },
- "cassandra.format.output": {
- "name": "cassandra.format.output",
- "value": "human",
- "type": "string",
- "description": "Output format: human-readable, or strict CQL. Default \u003d human"
- },
- "cassandra.format.locale": {
- "name": "cassandra.format.locale",
- "value": "en_US",
- "type": "string",
- "description": "Locale for formatting of output data. Default \u003d en_US"
- },
- "cassandra.format.timezone": {
- "name": "cassandra.format.timezone",
- "value": "UTC",
- "type": "string",
- "description": "Timezone for output of time/date-related values. Default \u003d UTC"
- },
- "cassandra.format.timestamp": {
- "name": "cassandra.format.timestamp",
- "value": "yyyy-MM-dd\u0027T\u0027HH:mm:ss.SSSXXX",
- "type": "string",
- "description": "Format string for timestamp columns"
- },
- "cassandra.format.date": {
- "name": "cassandra.format.date",
- "value": "yyyy-MM-dd",
- "type": "string",
- "description": "Format string for date columns"
- },
- "cassandra.format.time": {
- "name": "cassandra.format.time",
- "value": "HH:mm:ss.SSS",
- "type": "string",
- "description": "Format string for time columns"
- },
- "cassandra.format.float_precision": {
- "name": "cassandra.format.float_precision",
- "value": "5",
- "type": "number",
- "description": "Precision for formatting of float values"
- },
- "cassandra.format.double_precision": {
- "name": "cassandra.format.double_precision",
- "value": "12",
- "type": "number",
- "description": "Precision for formatting of double values"
- },
- "cassandra.format.decimal_precision": {
- "name": "cassandra.format.decimal_precision",
- "value": "-1",
- "type": "number",
- "description": "Precision for formatting of decimal values (by default, show everything)"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "cassandra",
- "class": "org.apache.zeppelin.cassandra.CassandraInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "editOnDblClick": false
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "sparql": {
- "id": "sparql",
- "name": "sparql",
- "group": "sparql",
- "properties": {
- "sparql.engine": {
- "name": "sparql.engine",
- "value": "jena",
- "type": "string",
- "description": "The sparql engine to use for the queries. Default: jena"
- },
- "sparql.endpoint": {
- "name": "sparql.endpoint",
- "value": "http://dbpedia.org/sparql",
- "type": "string",
- "description": "Complete URL of the endpoint. Default: http://dbpedia.org/sparql"
- },
- "sparql.replaceURIs": {
- "name": "sparql.replaceURIs",
- "value": true,
- "type": "checkbox",
- "description": "Replace the URIs in the result with the prefixes. Default: true"
- },
- "sparql.removeDatatypes": {
- "name": "sparql.removeDatatypes",
- "value": true,
- "type": "checkbox",
- "description": "Remove the datatypes from Literals so Zeppelin can use the values. Default: true"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "sparql",
- "class": "org.apache.zeppelin.sparql.SparqlInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sparql",
- "editOnDblClick": false,
- "completionKey": "TAB"
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- },
- "kylin": {
- "id": "kylin",
- "name": "kylin",
- "group": "kylin",
- "properties": {
- "kylin.api.url": {
- "name": "kylin.api.url",
- "value": "http://localhost:7070/kylin/api/query",
- "type": "url",
- "description": "Kylin API"
- },
- "kylin.api.user": {
- "name": "kylin.api.user",
- "value": "ADMIN",
- "type": "string",
- "description": "Kylin username"
- },
- "kylin.api.password": {
- "name": "kylin.api.password",
- "value": "KYLIN",
- "type": "password",
- "description": "Kylin password"
- },
- "kylin.query.project": {
- "name": "kylin.query.project",
- "value": "learn_kylin",
- "type": "textarea",
- "description": "Default Kylin project name"
- },
- "kylin.query.offset": {
- "name": "kylin.query.offset",
- "value": "0",
- "type": "number",
- "description": "Kylin query offset"
- },
- "kylin.query.limit": {
- "name": "kylin.query.limit",
- "value": "5000",
- "type": "number",
- "description": "Kylin query limit"
- },
- "kylin.query.ispartial": {
- "name": "kylin.query.ispartial",
- "value": true,
- "type": "checkbox",
- "description": "Kylin query partial flag, deprecated"
- }
- },
- "status": "READY",
- "interpreterGroup": [
- {
- "name": "kylin",
- "class": "org.apache.zeppelin.kylin.KylinInterpreter",
- "defaultInterpreter": false,
- "editor": {
- "language": "sql",
- "editOnDblClick": false,
- "completionSupport": true
- }
- }
- ],
- "dependencies": [],
- "option": {
- "remote": true,
- "port": -1,
- "isExistingProcess": false,
- "setPermission": false,
- "owners": [],
- "isUserImpersonate": false
- }
- }
- },
- "interpreterRepositories": [
- {
- "id": "central",
- "type": "default",
- "url": "https://repo1.maven.org/maven2/",
- "host": "repo1.maven.org",
- "protocol": "https",
- "releasePolicy": {
- "enabled": true,
- "updatePolicy": "daily",
- "checksumPolicy": "warn"
- },
- "snapshotPolicy": {
- "enabled": true,
- "updatePolicy": "daily",
- "checksumPolicy": "warn"
- },
- "mirroredRepositories": [],
- "repositoryManager": false
- },
- {
- "id": "local",
- "type": "default",
- "url": "file:///opt/zeppelin/.m2/repository",
- "host": "",
- "protocol": "file",
- "releasePolicy": {
- "enabled": true,
- "updatePolicy": "daily",
- "checksumPolicy": "warn"
- },
- "snapshotPolicy": {
- "enabled": true,
- "updatePolicy": "daily",
- "checksumPolicy": "warn"
- },
- "mirroredRepositories": [],
- "repositoryManager": false
- }
- ]
-}