ATLAS-4097: update zookeeper download url for recent zookeeper version upgrade
parent f0bb4e94ca
commit 907eccdfb3
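
In short: recent ZooKeeper releases are published under a new artifact name, so the download properties in the two Maven profiles below move from zookeeper-${zookeeper.version}.tar.gz to apache-zookeeper-${zookeeper.version}-bin.tar.gz. The same commit also cleans up the Python launcher scripts, replacing Python 2 only constructs (print statements, the file() built-in, undecoded subprocess output) with forms that also run on Python 3, and adds one property to the embedded hbase-site template.
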
@@ -182,8 +182,8 @@ atlas.graph.storage.lock.wait-time=300
     <solr.folder>solr-${solr.version}</solr.folder>
     <solr.tar>https://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.tgz</solr.tar>
     <zk.dir>${project.build.directory}/zk</zk.dir>
-    <zk.folder>zookeeper-${zookeeper.version}</zk.folder>
-    <zk.tar>https://archive.apache.org/dist/zookeeper/zookeeper-${zookeeper.version}/zookeeper-${zookeeper.version}.tar.gz</zk.tar>
+    <zk.folder>apache-zookeeper-${zookeeper.version}-bin</zk.folder>
+    <zk.tar>https://archive.apache.org/dist/zookeeper/zookeeper-${zookeeper.version}/apache-zookeeper-${zookeeper.version}-bin.tar.gz</zk.tar>
 </properties>
 <build>
     <plugins>
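
ZooKeeper releases from 3.5.5 onward publish the runnable distribution as apache-zookeeper-<version>-bin.tar.gz, and the archive unpacks into a folder of the same name, so the old zookeeper-<version> properties stop resolving once zookeeper.version is bumped. As an illustration only (the concrete zookeeper.version is not part of this hunk), with zookeeper.version set to 3.5.5 the new properties expand to:

    zk.folder = apache-zookeeper-3.5.5-bin
    zk.tar    = https://archive.apache.org/dist/zookeeper/zookeeper-3.5.5/apache-zookeeper-3.5.5-bin.tar.gz
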
@@ -429,8 +429,8 @@ atlas.graph.storage.conf-file=${sys:atlas.home}/conf/cassandra.yml
     <solr.folder>solr-${solr.version}</solr.folder>
     <solr.tar>https://archive.apache.org/dist/lucene/solr/${solr.version}/solr-${solr.version}.tgz</solr.tar>
     <zk.dir>${project.build.directory}/zk</zk.dir>
-    <zk.folder>zookeeper-${zookeeper.version}</zk.folder>
-    <zk.tar>https://archive.apache.org/dist/zookeeper/zookeeper-${zookeeper.version}/zookeeper-${zookeeper.version}.tar.gz</zk.tar>
+    <zk.folder>apache-zookeeper-${zookeeper.version}-bin</zk.folder>
+    <zk.tar>https://archive.apache.org/dist/zookeeper/zookeeper-${zookeeper.version}/apache-zookeeper-${zookeeper.version}-bin.tar.gz</zk.tar>
 </properties>
 <build>
     <plugins>
@@ -34,7 +34,7 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception: %s " % str(e)
+        print("Exception: %s " % str(e))
         returncode = -1

     sys.exit(returncode)
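
Note: the print statement exists only in Python 2; in Python 3 print is a built-in function. The parenthesised single-argument form used throughout this patch is also valid on Python 2.7 (it is parsed as a print statement followed by a parenthesised expression), so the converted scripts keep working on both interpreters. A minimal illustration, not taken from the patch:

    # Valid on Python 2.7 and Python 3 alike: one parenthesised argument.
    print("Exception: %s " % str(ValueError("example")))
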
@@ -186,7 +186,7 @@ def executeEnvSh(confDir):
         proc = subprocess.Popen(command, stdout = subprocess.PIPE)

         for line in proc.stdout:
-            (key, _, value) = line.strip().partition("=")
+            (key, _, value) = line.decode('utf8').strip().partition("=")
             if key in ENV_KEYS:
                 os.environ[key] = value

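
The added decode('utf8') is what makes this loop work on Python 3: without text=True/universal_newlines, a subprocess.Popen pipe yields bytes, so mixing in str arguments such as the "=" separator for partition() fails. A standalone sketch of the pattern (the command and the key check are stand-ins, not the real executeEnvSh code):

    import os
    import subprocess

    # Pipe output arrives as bytes on Python 3; decode before using str methods.
    proc = subprocess.Popen(["env"], stdout=subprocess.PIPE)
    for line in proc.stdout:
        (key, _, value) = line.decode('utf8').strip().partition("=")
        if key == "HOME":    # stand-in for the ENV_KEYS membership check
            os.environ[key] = value
    proc.wait()
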
@@ -317,15 +317,15 @@ def read_input(name, exe):
         exe.stdin.write(cred + "\n")

 def debug(text):
-    if DEBUG: print '[DEBUG] ' + text
+    if DEBUG: print('[DEBUG] ' + text)


 def error(text):
-    print '[ERROR] ' + text
+    print('[ERROR] ' + text)
     sys.stdout.flush()

 def info(text):
-    print text
+    print(text)
     sys.stdout.flush()


@@ -517,7 +517,7 @@ def get_atlas_url_port(confdir):
     else:
         port = getConfigWithDefault(confdir, ATLAS_HTTP_PORT, DEFAULT_ATLAS_HTTP_PORT)

-    print "starting atlas on port %s" % port
+    print("starting atlas on port %s" % port)
     return port

 def get_atlas_url_host(confdir):
@@ -525,7 +525,7 @@ def get_atlas_url_host(confdir):
     host = getConfigWithDefault(confdir, ATLAS_SERVER_BIND_ADDRESS, DEFAULT_ATLAS_SERVER_HOST)
     if (host == '0.0.0.0'):
         host = DEFAULT_ATLAS_SERVER_HOST
-    print "starting atlas on host %s" % host
+    print("starting atlas on host %s" % host)
     return host

 def wait_for_startup(confdir, wait):
@@ -601,7 +601,7 @@ def run_solr(dir, action, zk_url = None, port = None, logdir = None, wait=True,
         srcSolrXmlPath = os.path.join(solrServerDir(), "solr", "solr.xml")
         destSolrXmlPath = os.path.join(homedir, "solr.xml")
         if not os.path.exists(destSolrXmlPath) :
-            print "solr.xml doesn't exist in " + homedir + ", copying from " + srcSolrXmlPath
+            print("solr.xml doesn't exist in " + homedir + ", copying from " + srcSolrXmlPath)
             copyCmd = ["cp", srcSolrXmlPath, homedir]
             runProcess(copyCmd, logdir, False, True)
         cmd.append('-s')
@@ -694,11 +694,11 @@ def configure_cassandra(dir):
         os.remove(tmpl_file)

 def server_already_running(pid):
-    print "Atlas server is already running under process %s" % pid
+    print("Atlas server is already running under process %s" % pid)
     sys.exit()

 def server_pid_not_running(pid):
-    print "The Server is no longer running with pid %s" %pid
+    print("The Server is no longer running with pid %s" %pid)

 def grep(file, value):
     for line in open(file).readlines():
@@ -31,7 +31,7 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception in setting up Kafka topics for Atlas: %s" % str(e)
+        print("Exception in setting up Kafka topics for Atlas: %s" % str(e))
         returncode = -1

     sys.exit(returncode)
@@ -31,7 +31,7 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception in setting up Kafka topics for Atlas: %s" % str(e)
+        print("Exception in setting up Kafka topics for Atlas: %s" % str(e))
         returncode = -1

     sys.exit(returncode)
@@ -98,49 +98,49 @@ def main():

     if os.path.isfile(atlas_pid_file):
         #Check if process listed in atlas.pid file is still running
-        pf = file(atlas_pid_file, 'r')
+        pf = open(atlas_pid_file, 'r')
         pid = pf.read().strip()
         pf.close()
         if pid != "":
             if mc.exist_pid((int)(pid)):
                 if is_setup:
-                    print "Cannot run setup when server is running."
+                    print("Cannot run setup when server is running.")
                 mc.server_already_running(pid)
             else:
                 mc.server_pid_not_running(pid)

     if is_hbase and mc.is_hbase_local(confdir):
-        print "configured for local hbase."
+        print("configured for local hbase.")
         mc.configure_hbase(atlas_home)
         mc.run_hbase_action(mc.hbaseBinDir(atlas_home), "start", hbase_conf_dir, logdir)
-        print "hbase started."
+        print("hbase started.")

     #solr setup
     if mc.is_solr_local(confdir):
-        print "configured for local solr."
+        print("configured for local solr.")

         if mc.is_cassandra_local(confdir):
-            print "Cassandra embedded configured."
+            print("Cassandra embedded configured.")
             mc.configure_cassandra(atlas_home)

         if mc.is_zookeeper_local(confdir):
             mc.configure_zookeeper(atlas_home)
             mc.run_zookeeper(mc.zookeeperBinDir(atlas_home), "start", logdir)
-            print "zookeeper started."
+            print("zookeeper started.")

         mc.run_solr(mc.solrBinDir(atlas_home), "start", mc.get_solr_zk_url(confdir), mc.solrPort(), logdir, True, mc.solrHomeDir(atlas_home))
-        print "solr started."
+        print("solr started.")

-        print "setting up solr collections..."
+        print("setting up solr collections...")
         mc.create_solr_collection(mc.solrBinDir(atlas_home), mc.solrConfDir(atlas_home), "vertex_index", logdir)
         mc.create_solr_collection(mc.solrBinDir(atlas_home), mc.solrConfDir(atlas_home), "edge_index", logdir)
         mc.create_solr_collection(mc.solrBinDir(atlas_home), mc.solrConfDir(atlas_home), "fulltext_index", logdir)

     #elasticsearch setup
     if mc.is_elasticsearch_local():
-        print "configured for local elasticsearch."
+        print("configured for local elasticsearch.")
         mc.start_elasticsearch(mc.elasticsearchBinDir(atlas_home), logdir)
-        print "elasticsearch started."
+        print("elasticsearch started.")

     web_app_path = os.path.join(web_app_dir, "atlas")
     if (mc.isCygwin()):
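
The pid files are now read with the built-in open() because the file() built-in no longer exists in Python 3; open() does the same job here and is also available on Python 2.7. A minimal sketch with a hypothetical path (the real scripts use mc.pidFile(atlas_home) and logs/elasticsearch.pid):

    # Hypothetical pid file path, for illustration only.
    pid_file = "/tmp/atlas.pid"
    pf = open(pid_file, 'r')
    pid = int(pf.read().strip())
    pf.close()
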
@@ -148,7 +148,7 @@ def main():
     if not is_setup:
         start_atlas_server(atlas_classpath, atlas_pid_file, jvm_logdir, jvm_opts_list, web_app_path)
         mc.wait_for_startup(confdir, 300)
-        print "Apache Atlas Server started!!!\n"
+        print("Apache Atlas Server started!!!\n")
     else:
         process = mc.java("org.apache.atlas.web.setup.AtlasSetup", [], atlas_classpath, jvm_opts_list, jvm_logdir)
         return process.wait()
@@ -164,8 +164,8 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception: %s " % str(e)
-        print traceback.format_exc()
+        print("Exception: %s " % str(e))
+        print(traceback.format_exc())
         returncode = -1

     sys.exit(returncode)
@@ -42,7 +42,7 @@ def main():
     atlas_pid_file = mc.pidFile(atlas_home)

     try:
-        pf = file(atlas_pid_file, 'r')
+        pf = open(atlas_pid_file, 'r')
         pid = int(pf.read().strip())
         pf.close()
     except:
@@ -60,7 +60,7 @@ def main():

     mc.wait_for_shutdown(pid, "stopping atlas", 30)
     if not mc.exist_pid(pid):
-        print "Apache Atlas Server stopped!!!\n"
+        print("Apache Atlas Server stopped!!!\n")

     # assuming kill worked since process check on windows is more involved...
     if os.path.exists(atlas_pid_file):
@@ -78,7 +78,7 @@ def main():
         logdir = os.path.join(atlas_home, 'logs')
         elastic_pid_file = os.path.join(logdir, 'elasticsearch.pid')
         try:
-            pf = file(elastic_pid_file, 'r')
+            pf = open(elastic_pid_file, 'r')
             pid = int(pf.read().strip())
             pf.close()
         except:
@@ -97,7 +97,7 @@ def main():

         mc.wait_for_shutdown(pid, "stopping elasticsearch", 30)
         if not mc.exist_pid(pid):
-            print "Elasticsearch stopped!!!\n"
+            print("Elasticsearch stopped!!!\n")

         # assuming kill worked since process check on windows is more involved...
         if os.path.exists(elastic_pid_file):
@@ -127,8 +127,8 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception: %s " % str(e)
-        print traceback.format_exc()
+        print("Exception: %s " % str(e))
+        print(traceback.format_exc())
         returncode = -1

     sys.exit(returncode)
@@ -34,7 +34,7 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception: %s " % str(e)
+        print("Exception: %s " % str(e))
         returncode = -1

     sys.exit(returncode)
@@ -52,7 +52,7 @@ if __name__ == '__main__':
     try:
         returncode = main()
     except Exception as e:
-        print "Exception: %s " % str(e)
+        print("Exception: %s " % str(e))
         returncode = -1

     sys.exit(returncode)
@@ -34,11 +34,11 @@ if __name__ == '__main__':
     try:
         returncode = main()
         if returncode == 0:
-            print "Sample data added to Apache Atlas Server.\n"
+            print("Sample data added to Apache Atlas Server.\n")
         else:
-            print "No sample data added to Apache Atlas Server.\n"
+            print("No sample data added to Apache Atlas Server.\n")
     except Exception as e:
-        print "Exception: %s " % str(e)
+        print("Exception: %s " % str(e))
         returncode = -1

     sys.exit(returncode)
@@ -34,11 +34,11 @@ if __name__ == '__main__':
     try:
         returncode = main()
         if returncode == 0:
-            print "Example data added to Apache Atlas Server!!!\n"
+            print("Example data added to Apache Atlas Server!!!\n")
         else:
-            print "No data was added to the Apache Atlas Server.\n"
+            print("No data was added to the Apache Atlas Server.\n")
     except Exception as e:
-        print "Exception: %s " % str(e)
+        print("Exception: %s " % str(e))
         returncode = -1

     sys.exit(returncode)
@@ -41,4 +41,8 @@
         <name>hbase.regionserver.port</name>
         <value>61520</value>
     </property>
+    <property>
+        <name>hbase.unsafe.stream.capability.enforce</name>
+        <value>false</value>
+    </property>
 </configuration>
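
hbase.unsafe.stream.capability.enforce=false is the usual switch for letting HBase 2.x run against a plain local filesystem, which cannot provide the hflush/hsync guarantees HBase otherwise enforces; presumably it is added here so the embedded HBase used for local development continues to start after the related dependency upgrades.
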