diff --git a/LICENSE b/LICENSE
index 0480ae43b..cdd4b9eea 100755
--- a/LICENSE
+++ b/LICENSE
@@ -218,6 +218,9 @@ Apache License. For details, see 3party-licenses/janusgraph-LICENSE
This product bundles pnotify, which is available under
Apache License. For details, see 3party-licenses/pnotify-LICENSE
+This product bundles hppc, which is available under
+Apache License. For details, see 3party-licenses/hppc-LICENSE
+
This product bundles mock(for python tests) 1.0.1, which is available under
BSD License. For details, see 3party-licenses/mock-LICENSE
diff --git a/NOTICE b/NOTICE
index 3937b113d..93104f755 100755
--- a/NOTICE
+++ b/NOTICE
@@ -1,22 +1,6 @@
-Apache Atlas (incubating)
+Apache Atlas
Copyright [2015-2017] The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
-
-==============================================================
-
-This product bundles titan 0.5.4(https://github.com/thinkaurelius/titan/blob/titan05):
-
-==============================================================
- Titan: Distributed Graph Database
- Copyright 2012 and onwards Aurelius
-==============================================================
-Titan includes software developed by Aurelius (http://thinkaurelius.com/) and the following individuals:
-
- * Matthias Broecheler
- * Dan LaRocque
- * Marko A. Rodriguez
- * Stephen Mallette
- * Pavel Yaskevich
diff --git a/addons/falcon-bridge-shim/pom.xml b/addons/falcon-bridge-shim/pom.xml
index 4ea5df954..649e29dba 100755
--- a/addons/falcon-bridge-shim/pom.xml
+++ b/addons/falcon-bridge-shim/pom.xml
@@ -30,10 +30,6 @@
    <name>Apache Atlas Falcon Bridge Shim</name>
    <packaging>jar</packaging>
-    <properties>
-        <falcon.version>0.8</falcon.version>
-    </properties>
-
diff --git a/addons/falcon-bridge/pom.xml b/addons/falcon-bridge/pom.xml
index c39938330..eeef50690 100644
--- a/addons/falcon-bridge/pom.xml
+++ b/addons/falcon-bridge/pom.xml
@@ -30,10 +30,6 @@
    <name>Apache Atlas Falcon Bridge</name>
    <packaging>jar</packaging>
-    <properties>
-        <falcon.version>0.8</falcon.version>
-    </properties>
-
diff --git a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
index 05214e5a9..24f36168c 100644
--- a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
+++ b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
@@ -109,7 +109,7 @@ public class FalconHookIT {
break;
case PROCESS:
- ((org.apache.falcon.entity.v0.process.Process) entity).setName(name);
+ ((Process) entity).setName(name);
break;
}
return (T)entity;
diff --git a/addons/hbase-bridge-shim/pom.xml b/addons/hbase-bridge-shim/pom.xml
index d45b6a5bd..280dc4c43 100644
--- a/addons/hbase-bridge-shim/pom.xml
+++ b/addons/hbase-bridge-shim/pom.xml
@@ -46,6 +46,10 @@
                    <groupId>javax.servlet</groupId>
                    <artifactId>servlet-api</artifactId>
                </exclusion>
+                <exclusion>
+                    <groupId>javax.ws.rs</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
diff --git a/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java b/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
index e8cb20b33..0b69104b1 100755
--- a/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
+++ b/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
@@ -18,86 +18,39 @@
*/
package org.apache.atlas.hbase.hook;
-import java.io.IOException;
-import java.util.List;
-import java.util.NavigableSet;
import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
-import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
-import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-import org.apache.hadoop.hbase.io.Reference;
-import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.master.RegionPlan;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
-import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
-import org.apache.hadoop.hbase.regionserver.DeleteTracker;
-import org.apache.hadoop.hbase.regionserver.InternalScanner;
-import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
-import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
-import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.Region.Operation;
-import org.apache.hadoop.hbase.regionserver.RegionScanner;
-import org.apache.hadoop.hbase.regionserver.ScanType;
-import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
-import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.wal.WALKey;
-import com.google.common.collect.ImmutableList;
-import java.util.Set;
-import com.google.common.net.HostAndPort;
+
+import java.io.IOException;
+import java.util.Optional;
-public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, RegionServerObserver, BulkLoadObserver {
+public class HBaseAtlasCoprocessor implements MasterCoprocessor, MasterObserver, RegionObserver, RegionServerObserver {
public static final Log LOG = LogFactory.getLog(HBaseAtlasCoprocessor.class);
private static final String ATLAS_PLUGIN_TYPE = "hbase";
private static final String ATLAS_HBASE_HOOK_IMPL_CLASSNAME = "org.apache.atlas.hbase.hook.HBaseAtlasCoprocessor";
- private AtlasPluginClassLoader atlasPluginClassLoader = null;
- private Object impl = null;
- private MasterObserver implMasterObserver = null;
- private RegionObserver implRegionObserver = null;
- private RegionServerObserver implRegionServerObserver = null;
- private BulkLoadObserver implBulkLoadObserver = null;
+ private AtlasPluginClassLoader atlasPluginClassLoader = null;
+ private Object impl = null;
+ private MasterObserver implMasterObserver = null;
+ private RegionObserver implRegionObserver = null;
+ private RegionServerObserver implRegionServerObserver = null;
+ private MasterCoprocessor implMasterCoprocessor = null;
public HBaseAtlasCoprocessor() {
if(LOG.isDebugEnabled()) {
@@ -128,7 +81,7 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
implMasterObserver = (MasterObserver)impl;
implRegionObserver = (RegionObserver)impl;
implRegionServerObserver = (RegionServerObserver)impl;
- implBulkLoadObserver = (BulkLoadObserver)impl;
+ implMasterCoprocessor = (MasterCoprocessor)impl;
} catch (Exception e) {
// check what need to be done
@@ -142,880 +95,9 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
}
}
-
-
@Override
- public void postScannerClose(ObserverContext c, InternalScanner s) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postScannerClose()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postScannerClose(c, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postScannerClose()");
- }
- }
-
- @Override
- public RegionScanner postScannerOpen(ObserverContext c, Scan scan, RegionScanner s) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postScannerOpen()");
- }
-
- final RegionScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postScannerOpen(c, scan, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postScannerOpen()");
- }
-
- return ret;
- }
-
- @Override
- public void postStartMaster(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postStartMaster()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postStartMaster(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postStartMaster()");
- }
-
- }
-
- @Override
- public void preAddColumn(ObserverContext c, TableName tableName, HColumnDescriptor column) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preAddColumn()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preAddColumn(c, tableName, column);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preAddColumn()");
- }
- }
-
- @Override
- public Result preAppend(ObserverContext c, Append append) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preAppend()");
- }
-
- final Result ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preAppend(c, append);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preAppend()");
- }
-
- return ret;
- }
-
- @Override
- public void preAssign(ObserverContext c, HRegionInfo regionInfo) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preAssign()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preAssign(c, regionInfo);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preAssign()");
- }
- }
-
- @Override
- public void preBalance(ObserverContext c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preBalance()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preBalance(c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preBalance()");
- }
- }
-
- @Override
- public boolean preBalanceSwitch(ObserverContext c, boolean newValue) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preBalanceSwitch()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implMasterObserver.preBalanceSwitch(c, newValue);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preBalanceSwitch()");
- }
-
- return ret;
- }
-
- @Override
- public void preBulkLoadHFile(ObserverContext ctx, List<Pair<byte[], String>> familyPaths) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preBulkLoadHFile()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preBulkLoadHFile(ctx, familyPaths);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preBulkLoadHFile()");
- }
-
- }
-
- @Override
- public boolean preCheckAndDelete(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator, Delete delete, boolean result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCheckAndDelete()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCheckAndDelete(c, row, family, qualifier, compareOp, comparator, delete, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCheckAndDelete()");
- }
-
- return ret;
- }
-
- @Override
- public boolean preCheckAndPut(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator, Put put, boolean result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCheckAndPut()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCheckAndPut(c, row, family, qualifier, compareOp, comparator, put, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCheckAndPut()");
- }
-
- return ret;
- }
-
- @Override
- public void preCloneSnapshot(ObserverContext ctx, SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCloneSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preCloneSnapshot(ctx, snapshot, hTableDescriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCloneSnapshot()");
- }
- }
-
- @Override
- public void preClose(ObserverContext e, boolean abortRequested) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preClose()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preClose(e, abortRequested);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preClose()");
- }
- }
-
- @Override
- public InternalScanner preCompact(ObserverContext e, Store store, InternalScanner scanner, ScanType scanType) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCompact()");
- }
-
- final InternalScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCompact(e, store, scanner, scanType);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCompact()");
- }
-
- return ret;
- }
-
- @Override
- public void preCompactSelection(ObserverContext e, Store store, List candidates) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCompactSelection()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preCompactSelection(e, store, candidates);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCompactSelection()");
- }
- }
-
- @Override
- public void preCreateTable(ObserverContext c, HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCreateTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preCreateTable(c, desc, regions);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCreateTable()");
- }
- }
-
- @Override
- public void preDelete(ObserverContext c, Delete delete, WALEdit edit, Durability durability) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDelete()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preDelete(c, delete, edit, durability);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDelete()");
- }
- }
-
- @Override
- public void preDeleteColumn(ObserverContext c, TableName tableName, byte[] col) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDeleteColumn()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDeleteColumn(c, tableName, col);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDeleteColumn()");
- }
- }
-
- @Override
- public void preDeleteSnapshot(ObserverContext ctx, SnapshotDescription snapshot) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDeleteSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDeleteSnapshot(ctx, snapshot);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDeleteSnapshot()");
- }
- }
-
- @Override
- public void preDeleteTable(ObserverContext c, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDeleteTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDeleteTable(c, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDeleteTable()");
- }
- }
-
- @Override
- public void preDisableTable(ObserverContext c, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDisableTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDisableTable(c, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDisableTable()");
- }
- }
-
- @Override
- public void preEnableTable(ObserverContext c, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preEnableTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preEnableTable(c, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preEnableTable()");
- }
- }
-
- @Override
- public boolean preExists(ObserverContext c, Get get, boolean exists) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preExists()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preExists(c, get, exists);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preExists()");
- }
-
- return ret;
- }
-
- @Override
- public void preFlush(ObserverContext e) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preFlush()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preFlush(e);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preFlush()");
- }
- }
-
- @Override
- public void preGetClosestRowBefore(ObserverContext c, byte[] row, byte[] family, Result result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preGetClosestRowBefore()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preGetClosestRowBefore(c, row, family, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preGetClosestRowBefore()");
- }
- }
-
- @Override
- public Result preIncrement(ObserverContext c, Increment increment) throws IOException {
- final Result ret;
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preIncrement()");
- }
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preIncrement(c, increment);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preIncrement()");
- }
-
- return ret;
- }
-
- @Override
- public long preIncrementColumnValue(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preIncrementColumnValue()");
- }
-
- final long ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preIncrementColumnValue(c, row, family, qualifier, amount, writeToWAL);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preIncrementColumnValue()");
- }
-
- return ret;
- }
-
- @Override
- public void preModifyColumn(ObserverContext c, TableName tableName, HColumnDescriptor descriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preModifyColumn()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preModifyColumn(c, tableName, descriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preModifyColumn()");
- }
- }
-
- @Override
- public void preModifyTable(ObserverContext c, TableName tableName, HTableDescriptor htd) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preModifyTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preModifyTable(c, tableName, htd);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preModifyTable()");
- }
- }
-
- @Override
- public void preMove(ObserverContext c, HRegionInfo region, ServerName srcServer, ServerName destServer) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preMove()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preMove(c, region, srcServer, destServer);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preMove()");
- }
- }
-
- @Override
- public void preOpen(ObserverContext e) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preOpen()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preOpen(e);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preOpen()");
- }
- }
-
- @Override
- public void preRestoreSnapshot(ObserverContext ctx, SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preRestoreSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preRestoreSnapshot(ctx, snapshot, hTableDescriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preRestoreSnapshot()");
- }
- }
-
- @Override
- public void preScannerClose(ObserverContext c, InternalScanner s) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preScannerClose()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preScannerClose(c, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preScannerClose()");
- }
- }
-
- @Override
- public boolean preScannerNext(ObserverContext c, InternalScanner s, List result, int limit, boolean hasNext) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preScannerNext()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preScannerNext(c, s, result, limit, hasNext);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preScannerNext()");
- }
-
- return ret;
- }
-
- @Override
- public RegionScanner preScannerOpen(ObserverContext c, Scan scan, RegionScanner s) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preScannerOpen()");
- }
-
- final RegionScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preScannerOpen(c, scan, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preScannerOpen()");
- }
-
- return ret;
- }
-
- @Override
- public void preShutdown(ObserverContext c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preShutdown()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preShutdown(c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preShutdown()");
- }
- }
-
- @Override
- public void preSnapshot(ObserverContext ctx, SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preSnapshot(ctx, snapshot, hTableDescriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSnapshot()");
- }
- }
-
- @Override
- public void preSplit(ObserverContext e) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSplit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preSplit(e);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSplit()");
- }
- }
-
- @Override
- public void preStopMaster(ObserverContext c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preStopMaster()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preStopMaster(c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preStopMaster()");
- }
- }
-
- @Override
- public void preStopRegionServer(ObserverContext env) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preStopRegionServer()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.preStopRegionServer(env);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preStopRegionServer()");
- }
- }
-
- @Override
- public void preUnassign(ObserverContext c, HRegionInfo regionInfo, boolean force) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preUnassign()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preUnassign(c, regionInfo, force);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preUnassign()");
- }
- }
-
- @Override
- public void preSetUserQuota(ObserverContext ctx, String userName, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSetUserQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preSetUserQuota(ctx, userName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSetUserQuota()");
- }
- }
-
- @Override
- public void preSetUserQuota(ObserverContext ctx, String userName, TableName tableName, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSetUserQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preSetUserQuota(ctx, userName, tableName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSetUserQuota()");
- }
- }
-
- @Override
- public void preSetUserQuota(ObserverContext ctx, String userName, String namespace, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSetUserQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preSetUserQuota(ctx, userName, namespace, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSetUserQuota()");
- }
- }
-
- @Override
- public void preSetTableQuota(ObserverContext ctx, TableName tableName, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSetTableQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preSetTableQuota(ctx, tableName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSetTableQuota()");
- }
- }
-
- @Override
- public void preSetNamespaceQuota(ObserverContext ctx, String namespace, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSetNamespaceQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preSetNamespaceQuota(ctx, namespace, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSetNamespaceQuota()");
- }
+ public Optional<MasterObserver> getMasterObserver() {
+ return Optional.of(this);
}
@Override
@@ -1026,1441 +108,19 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
try {
activatePluginClassLoader();
- implMasterObserver.start(env);
+ if (env instanceof MasterCoprocessorEnvironment) {
+ implMasterCoprocessor.start(env);
+ }
} finally {
deactivatePluginClassLoader();
}
-
if(LOG.isDebugEnabled()) {
LOG.debug("<== HBaseAtlasCoprocessor.start()");
}
}
@Override
- public void prePut(ObserverContext c, Put put, WALEdit edit, Durability durability) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.prePut()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.prePut(c, put, edit, durability);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.prePut()");
- }
- }
-
- @Override
- public void preGetOp(ObserverContext rEnv, Get get, List result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preGetOp()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preGetOp(rEnv, get, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preGetOp()");
- }
- }
-
- @Override
- public void preRegionOffline(ObserverContext c, HRegionInfo regionInfo) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preRegionOffline()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preRegionOffline(c, regionInfo);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preRegionOffline()");
- }
- }
-
- @Override
- public void preCreateNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCreateNamespace()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preCreateNamespace(ctx, ns);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCreateNamespace()");
- }
- }
-
- @Override
- public void preDeleteNamespace(ObserverContext ctx, String namespace) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDeleteNamespace()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDeleteNamespace(ctx, namespace);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDeleteNamespace()");
- }
- }
-
- @Override
- public void preModifyNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preModifyNamespace()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preModifyNamespace(ctx, ns);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preModifyNamespace()");
- }
- }
-
- @Override
- public void postGetTableDescriptors(ObserverContext ctx, List tableNamesList, List descriptors, String regex) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postGetTableDescriptors()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postGetTableDescriptors(ctx, tableNamesList, descriptors, regex);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postGetTableDescriptors()");
- }
- }
-
- @Override
- public void preMerge(ObserverContext ctx, Region regionA, Region regionB) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preMerge()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.preMerge(ctx, regionA, regionB);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preMerge()");
- }
- }
-
- @Override
- public void prePrepareBulkLoad(ObserverContext ctx, PrepareBulkLoadRequest request) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.prePrepareBulkLoad()");
- }
-
- try {
- activatePluginClassLoader();
- implBulkLoadObserver.prePrepareBulkLoad(ctx, request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.prePrepareBulkLoad()");
- }
- }
-
- @Override
- public void preCleanupBulkLoad(ObserverContext ctx, CleanupBulkLoadRequest request) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCleanupBulkLoad()");
- }
-
- try {
- activatePluginClassLoader();
- implBulkLoadObserver.preCleanupBulkLoad(ctx, request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCleanupBulkLoad()");
- }
- }
-
-
- @Override
- public void stop(CoprocessorEnvironment env) throws IOException {
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.stop()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.stop(env);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.stop()");
- }
- }
-
- @Override
- public void postMerge(ObserverContext c, Region regionA, Region regionB, Region mergedRegion) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postMerge()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.postMerge(c, regionA, regionB, mergedRegion);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postMerge()");
- }
- }
-
- @Override
- public void preMergeCommit(ObserverContext ctx, Region regionA, Region regionB, List metaEntries) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preMergeCommit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.preMergeCommit(ctx ,regionA, regionB, metaEntries);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preMergeCommit()");
- }
- }
-
- @Override
- public void postMergeCommit(ObserverContext ctx, Region regionA, Region regionB, Region mergedRegion) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postMergeCommit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.postMergeCommit(ctx ,regionA, regionB, mergedRegion);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postMergeCommit()");
- }
- }
-
- @Override
- public void preRollBackMerge(ObserverContext ctx, Region regionA, Region regionB) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preRollBackMerge()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.preRollBackMerge(ctx, regionA, regionB);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preRollBackMerge()");
- }
- }
-
- @Override
- public void postRollBackMerge(ObserverContext ctx, Region regionA, Region regionB) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postRollBackMerge()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.postRollBackMerge(ctx, regionA, regionB);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postRollBackMerge()");
- }
- }
-
- @Override
- public void preRollWALWriterRequest(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preRollWALWriterRequest()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.preRollWALWriterRequest(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preRollWALWriterRequest()");
- }
- }
-
- @Override
- public void postRollWALWriterRequest(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postRollWALWriterRequest()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.postRollWALWriterRequest(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postRollWALWriterRequest()");
- }
- }
-
- @Override
- public ReplicationEndpoint postCreateReplicationEndPoint(ObserverContext ctx, ReplicationEndpoint endpoint) {
-
- final ReplicationEndpoint ret;
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCreateReplicationEndPoint()");
- }
-
- try {
- activatePluginClassLoader();
- ret = implRegionServerObserver.postCreateReplicationEndPoint(ctx, endpoint);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCreateReplicationEndPoint()");
- }
-
- return ret;
- }
-
- @Override
- public void preReplicateLogEntries(ObserverContext ctx, List entries, CellScanner cells) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preReplicateLogEntries()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.preReplicateLogEntries(ctx, entries, cells);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preReplicateLogEntries()");
- }
- }
-
- @Override
- public void postReplicateLogEntries(ObserverContext ctx, List entries, CellScanner cells) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postReplicateLogEntries()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionServerObserver.postReplicateLogEntries(ctx, entries, cells);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postReplicateLogEntries()");
- }
- }
-
- @Override
- public void postOpen(ObserverContext c) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postOpen()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postOpen(c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postOpen()");
- }
- }
-
- @Override
- public void postLogReplay(ObserverContext c) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postLogReplay()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postLogReplay(c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postLogReplay()");
- }
- }
-
- @Override
- public InternalScanner preFlushScannerOpen(ObserverContext c, Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
-
- final InternalScanner ret;
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preFlushScannerOpen()");
- }
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preFlushScannerOpen(c, store, memstoreScanner, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preFlushScannerOpen()");
- }
-
- return ret;
- }
-
- @Override
- public InternalScanner preFlush(ObserverContext c, Store store, InternalScanner scanner) throws IOException {
-
- final InternalScanner ret;
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preFlush()");
- }
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preFlush(c, store, scanner);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preFlush()");
- }
-
- return ret;
- }
-
- @Override
- public void postFlush(ObserverContext c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postFlush()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postFlush(c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postFlush()");
- }
- }
-
- @Override
- public void postFlush(ObserverContext c, Store store, StoreFile resultFile) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postFlush()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postFlush(c, store, resultFile);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postFlush()");
- }
- }
-
- @Override
- public void preCompactSelection(ObserverContext c, Store store, List candidates, CompactionRequest request) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCompactSelection()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preCompactSelection(c, store, candidates, request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCompactSelection()");
- }
- }
-
- @Override
- public void postCompactSelection(ObserverContext c, Store store, ImmutableList selected, CompactionRequest request) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCompactSelection()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postCompactSelection(c, store, selected, request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCompactSelection()");
- }
- }
-
- @Override
- public void postCompactSelection(ObserverContext c, Store store, ImmutableList selected) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCompactSelection()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postCompactSelection(c, store, selected);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCompactSelection()");
- }
- }
-
- @Override
- public InternalScanner preCompact(ObserverContext c, Store store, InternalScanner scanner, ScanType scanType, CompactionRequest request) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCompact()");
- }
-
- final InternalScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCompact(c, store, scanner, scanType, request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCompact()");
- }
-
- return ret;
- }
-
- @Override
- public InternalScanner preCompactScannerOpen(ObserverContext c, Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
- long earliestPutTs, InternalScanner s, CompactionRequest request) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCompactScannerOpen()");
- }
-
- final InternalScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCompactScannerOpen(c, store, scanners, scanType, earliestPutTs, s,request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCompactScannerOpen()");
- }
-
- return ret;
- }
-
- @Override
- public InternalScanner preCompactScannerOpen(ObserverContext c, Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
- long earliestPutTs, InternalScanner s) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCompactScannerOpen()");
- }
-
- final InternalScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCompactScannerOpen(c, store, scanners, scanType, earliestPutTs, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCompactScannerOpen()");
- }
-
- return ret;
- }
-
- @Override
- public void postCompact(ObserverContext c, Store store, StoreFile resultFile, CompactionRequest request) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCompact()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postCompact(c, store, resultFile, request);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCompact()");
- }
- }
-
- @Override
- public void postCompact(ObserverContext c, Store store, StoreFile resultFile) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCompact()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postCompact(c, store, resultFile);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCompact()");
- }
- }
-
- @Override
- public void preSplit(ObserverContext c, byte[] splitRow) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSplit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preSplit(c, splitRow);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSplit()");
- }
- }
-
- @Override
- public void postSplit(ObserverContext c, Region l, Region r) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSplit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postSplit(c, l, r);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSplit()");
- }
- }
-
- @Override
- public void preSplitBeforePONR(ObserverContext ctx, byte[] splitKey, List metaEntries) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSplitBeforePONR()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preSplitBeforePONR(ctx, splitKey, metaEntries);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSplitBeforePONR()");
- }
- }
-
- @Override
- public void preSplitAfterPONR(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preSplitAfterPONR()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preSplitAfterPONR(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preSplitAfterPONR()");
- }
- }
-
- @Override
- public void preRollBackSplit(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preRollBackSplit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preRollBackSplit(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preRollBackSplit()");
- }
- }
-
- @Override
- public void postRollBackSplit(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postRollBackSplit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postRollBackSplit(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postRollBackSplit()");
- }
- }
-
- @Override
- public void postCompleteSplit(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCompleteSplit()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postCompleteSplit(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCompleteSplit()");
- }
- }
-
- @Override
- public void postClose(ObserverContext c, boolean abortRequested) {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postClose()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postClose(c, abortRequested);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postClose()");
- }
- }
-
- @Override
- public void postGetClosestRowBefore(ObserverContext c, byte[] row, byte[] family, Result result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postGetClosestRowBefore()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postGetClosestRowBefore(c, row, family, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postGetClosestRowBefore()");
- }
- }
-
- @Override
- public void postGetOp(ObserverContext c, Get get, List<Cell> result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postGetOp()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postGetOp(c, get, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postGetOp()");
- }
- }
-
- @Override
- public boolean postExists(ObserverContext c, Get get, boolean exists) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postExists()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postExists(c, get, exists);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postExists()");
- }
-
- return ret;
- }
-
- @Override
- public void postPut(ObserverContext c, Put put, WALEdit edit, Durability durability) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postPut()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postPut(c, put, edit, durability);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postPut()");
- }
- }
-
- @Override
- public void prePrepareTimeStampForDeleteVersion(ObserverContext c, Mutation mutation, Cell cell, byte[] byteNow, Get get) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.prePrepareTimeStampForDeleteVersion()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.prePrepareTimeStampForDeleteVersion(c, mutation, cell, byteNow, get);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.prePrepareTimeStampForDeleteVersion()");
- }
- }
-
- @Override
- public void postDelete(ObserverContext c, Delete delete, WALEdit edit, Durability durability) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDelete()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postDelete(c, delete, edit, durability);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDelete()");
- }
- }
-
- @Override
- public void preBatchMutate(ObserverContext c, MiniBatchOperationInProgress miniBatchOp) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preBatchMutate()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preBatchMutate(c, miniBatchOp);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preBatchMutate()");
- }
- }
-
- @Override
- public void postBatchMutate(ObserverContext c, MiniBatchOperationInProgress miniBatchOp) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postBatchMutate()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postBatchMutate(c, miniBatchOp);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postBatchMutate()");
- }
- }
-
- @Override
- public void postStartRegionOperation(ObserverContext ctx, Operation operation) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postStartRegionOperation()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postStartRegionOperation(ctx, operation);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postStartRegionOperation()");
- }
- }
-
- @Override
- public void postCloseRegionOperation(ObserverContext ctx, Operation operation) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCloseRegionOperation()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postCloseRegionOperation(ctx, operation);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCloseRegionOperation()");
- }
- }
-
- @Override
- public void postBatchMutateIndispensably(ObserverContext ctx, MiniBatchOperationInProgress miniBatchOp, boolean success) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postBatchMutateIndispensably()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postBatchMutateIndispensably(ctx, miniBatchOp, success);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postBatchMutateIndispensably()");
- }
- }
-
- @Override
- public boolean preCheckAndPutAfterRowLock(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
- ByteArrayComparable comparator, Put put, boolean result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCheckAndPutAfterRowLock()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCheckAndPutAfterRowLock(c, row, family, qualifier, compareOp, comparator, put, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCheckAndPutAfterRowLock()");
- }
-
- return ret;
- }
-
- @Override
- public boolean postCheckAndPut(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
- ByteArrayComparable comparator, Put put, boolean result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCheckAndPut()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postCheckAndPut(c, row, family, qualifier, compareOp, comparator, put, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCheckAndPut()");
- }
-
- return ret;
- }
-
- @Override
- public boolean preCheckAndDeleteAfterRowLock(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
- ByteArrayComparable comparator, Delete delete, boolean result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCheckAndDeleteAfterRowLock()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preCheckAndDeleteAfterRowLock(c, row, family, qualifier, compareOp, comparator, delete, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCheckAndDeleteAfterRowLock()");
- }
-
- return ret;
- }
-
- @Override
- public boolean postCheckAndDelete(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
- ByteArrayComparable comparator, Delete delete, boolean result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCheckAndDelete()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postCheckAndDelete(c, row, family, qualifier, compareOp, comparator, delete, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCheckAndDelete()");
- }
-
- return ret;
- }
-
- @Override
- public long postIncrementColumnValue(ObserverContext c, byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL, long result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postIncrementColumnValue()");
- }
-
- final long ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postIncrementColumnValue(c, row, family, qualifier, amount, writeToWAL, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postIncrementColumnValue()");
- }
-
- return ret;
- }
-
- @Override
- public Result preAppendAfterRowLock(ObserverContext c, Append append) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preAppendAfterRowLock()");
- }
-
- final Result ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preAppendAfterRowLock(c, append);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preAppendAfterRowLock()");
- }
-
- return ret;
- }
-
- @Override
- public Result postAppend(ObserverContext c, Append append, Result result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postAppend()");
- }
-
- final Result ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postAppend(c, append, result);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postAppend()");
- }
-
- return ret;
- }
-
- @Override
- public Result preIncrementAfterRowLock(ObserverContext c, Increment increment) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preIncrementAfterRowLock()");
- }
-
- final Result ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preIncrementAfterRowLock(c, increment);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preIncrementAfterRowLock()");
- }
-
- return ret;
- }
-
- @Override
- public Result postIncrement(ObserverContext c, Increment increment, Result result) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postIncrement()");
- }
-
- final Result ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postIncrement(c, increment, result );
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postIncrement()");
- }
-
- return ret;
- }
-
- @Override
- public KeyValueScanner preStoreScannerOpen(ObserverContext c, Store store, Scan scan, NavigableSet targetCols, KeyValueScanner s) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preStoreScannerOpen()");
- }
-
- final KeyValueScanner ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preStoreScannerOpen(c, store, scan, targetCols, s);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preStoreScannerOpen()");
- }
-
- return ret;
- }
-
- @Override
- public boolean postScannerNext(ObserverContext c, InternalScanner s, List result, int limit, boolean hasNext) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postScannerNext()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postScannerNext(c, s, result, limit, hasNext);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postScannerNext()");
- }
-
- return ret;
- }
-
- @Override
- public boolean postScannerFilterRow(ObserverContext c, InternalScanner s, byte[] currentRow, int offset, short length, boolean hasMore) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postScannerFilterRow()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postScannerFilterRow(c, s, currentRow, offset, length, hasMore);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postScannerFilterRow()");
- }
-
- return ret;
- }
-
- @Override
- public void preWALRestore(ObserverContext<? extends RegionCoprocessorEnvironment> ctx, HRegionInfo info, WALKey logKey, WALEdit logEdit) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preWALRestore()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preWALRestore(ctx, info, logKey, logEdit);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preWALRestore()");
- }
- }
-
- @Override
- public void postWALRestore(ObserverContext<? extends RegionCoprocessorEnvironment> ctx, HRegionInfo info, WALKey logKey, WALEdit logEdit) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postWALRestore()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postWALRestore(ctx, info, logKey, logEdit);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postWALRestore()");
- }
- }
-
- @Override
- public boolean postBulkLoadHFile(ObserverContext ctx, List<Pair<byte[], String>> familyPaths, boolean hasLoaded) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postBulkLoadHFile()");
- }
-
- final boolean ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postBulkLoadHFile(ctx, familyPaths, hasLoaded);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postBulkLoadHFile()");
- }
-
- return ret;
- }
-
- @Override
- public Reader preStoreFileReaderOpen(ObserverContext ctx, FileSystem fs, Path p, FSDataInputStreamWrapper in, long size,
- CacheConfig cacheConf, Reference r, Reader reader) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preStoreFileReaderOpen()");
- }
-
- final Reader ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.preStoreFileReaderOpen(ctx, fs, p, in, size, cacheConf, r, reader);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preStoreFileReaderOpen()");
- }
-
- return ret;
- }
-
- @Override
- public Reader postStoreFileReaderOpen(ObserverContext ctx, FileSystem fs, Path p, FSDataInputStreamWrapper in, long size,
- CacheConfig cacheConf, Reference r, Reader reader) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postStoreFileReaderOpen()");
- }
-
- final Reader ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postStoreFileReaderOpen(ctx, fs, p, in, size, cacheConf, r, reader);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postStoreFileReaderOpen()");
- }
-
- return ret;
- }
-
- @Override
- public Cell postMutationBeforeWAL(ObserverContext ctx, MutationType opType, Mutation mutation, Cell oldCell, Cell newCell) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postMutationBeforeWAL()");
- }
-
- final Cell ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postMutationBeforeWAL(ctx, opType, mutation, oldCell, newCell);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postMutationBeforeWAL()");
- }
-
- return ret;
- }
-
- @Override
- public DeleteTracker postInstantiateDeleteTracker(ObserverContext ctx, DeleteTracker delTracker) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postInstantiateDeleteTracker()");
- }
-
- final DeleteTracker ret;
-
- try {
- activatePluginClassLoader();
- ret = implRegionObserver.postInstantiateDeleteTracker(ctx, delTracker);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postInstantiateDeleteTracker()");
- }
-
- return ret;
- }
-
- @Override
- public void postCreateTable(ObserverContext ctx, HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+ public void postCreateTable(ObserverContext ctx, TableDescriptor desc, RegionInfo[] regions) throws IOException {
if(LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()");
}
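Note: the rendered diff drops the angle-bracketed generic type parameters from the Java signatures. As a reading aid only, the following is a sketch of how the updated hook likely reads in full against the HBase 2.x coprocessor API; the ObserverContext/MasterCoprocessorEnvironment generics are assumptions, and the body follows the delegation pattern used by the other hooks in this class.

// Reconstruction sketch, not verbatim source: generics are assumed from the HBase 2.x API.
@Override
public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
                            TableDescriptor desc, RegionInfo[] regions) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()");
    }

    try {
        activatePluginClassLoader();
        implMasterObserver.postCreateTable(ctx, desc, regions);
    } finally {
        deactivatePluginClassLoader();
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
    }
}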
@@ -2478,38 +138,20 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
}
@Override
- public void preCreateTableHandler(ObserverContext ctx, HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+ public void postModifyTable(ObserverContext ctx, TableName tableName, TableDescriptor htd) throws IOException {
if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preCreateTableHandler()");
+ LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
}
try {
activatePluginClassLoader();
- implMasterObserver.preCreateTableHandler(ctx, desc, regions);
+ implMasterObserver.postModifyTable(ctx, tableName, htd);
} finally {
deactivatePluginClassLoader();
}
if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preCreateTableHandler()");
- }
- }
-
- @Override
- public void postCreateTableHandler(ObserverContext ctx, HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCreateTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postCreateTableHandler(ctx, desc, regions);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCreateTableHandler()");
+ LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
}
}
@@ -2531,766 +173,10 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
}
}
- @Override
- public void preDeleteTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDeleteTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDeleteTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDeleteTableHandler()");
- }
- }
-
- @Override
- public void postDeleteTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postDeleteTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTableHandler()");
- }
- }
-
- @Override
- public void preTruncateTable(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preTruncateTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preTruncateTable(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preTruncateTable()");
- }
- }
-
- @Override
- public void postTruncateTable(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postTruncateTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postTruncateTable(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postTruncateTable()");
- }
- }
-
- @Override
- public void preTruncateTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preTruncateTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preTruncateTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preTruncateTableHandler()");
- }
- }
-
- @Override
- public void postTruncateTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postTruncateTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postTruncateTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postTruncateTableHandler()");
- }
- }
-
- @Override
- public void postModifyTable(ObserverContext ctx, TableName tableName, HTableDescriptor htd) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postModifyTable(ctx, tableName, htd);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
- }
- }
-
- @Override
- public void preModifyTableHandler(ObserverContext ctx, TableName tableName, HTableDescriptor htd) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preModifyTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preModifyTableHandler(ctx, tableName, htd);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preModifyTableHandler()");
- }
- }
-
- @Override
- public void postModifyTableHandler(ObserverContext ctx, TableName tableName, HTableDescriptor htd) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postModifyTableHandler(ctx, tableName, htd);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postModifyTableHandler()");
- }
- }
-
- @Override
- public void postAddColumn(ObserverContext ctx, TableName tableName, HColumnDescriptor column) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postAddColumn()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postAddColumn(ctx, tableName, column);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postAddColumn()");
- }
- }
-
- @Override
- public void preAddColumnHandler(ObserverContext ctx, TableName tableName, HColumnDescriptor column) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preAddColumnHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preAddColumnHandler(ctx, tableName, column);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preAddColumnHandler()");
- }
- }
-
- @Override
- public void postAddColumnHandler(ObserverContext ctx, TableName tableName, HColumnDescriptor column) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postAddColumnHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postAddColumnHandler(ctx, tableName, column);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postAddColumnHandler()");
- }
- }
-
- @Override
- public void postModifyColumn(ObserverContext ctx, TableName tableName, HColumnDescriptor descriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyColumn()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postModifyColumn(ctx, tableName, descriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postModifyColumn()");
- }
- }
-
- @Override
- public void preModifyColumnHandler(ObserverContext ctx, TableName tableName, HColumnDescriptor descriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preModifyColumnHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preModifyColumnHandler(ctx, tableName, descriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preModifyColumnHandler()");
- }
- }
-
- @Override
- public void postModifyColumnHandler(ObserverContext ctx, TableName tableName, HColumnDescriptor descriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyColumnHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postModifyColumnHandler(ctx, tableName, descriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postModifyColumnHandler()");
- }
- }
-
- @Override
- public void postDeleteColumn(ObserverContext ctx, TableName tableName, byte[] c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteColumn()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postDeleteColumn(ctx, tableName, c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDeleteColumn()");
- }
- }
-
- @Override
- public void preDeleteColumnHandler(ObserverContext ctx, TableName tableName, byte[] c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDeleteColumnHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDeleteColumnHandler(ctx, tableName, c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDeleteColumnHandler()");
- }
- }
-
- @Override
- public void postDeleteColumnHandler(ObserverContext ctx, TableName tableName, byte[] c) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteColumnHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postDeleteColumnHandler(ctx, tableName, c);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDeleteColumnHandler()");
- }
- }
-
- @Override
- public void postEnableTable(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postEnableTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postEnableTable(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postEnableTable()");
- }
- }
-
- @Override
- public void preEnableTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preEnableTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preEnableTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preEnableTableHandler()");
- }
- }
-
- @Override
- public void postEnableTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postEnableTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postEnableTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postEnableTableHandler()");
- }
- }
-
- @Override
- public void postDisableTable(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDisableTable()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postDisableTable(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDisableTable()");
- }
- }
-
- @Override
- public void preDisableTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preDisableTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preDisableTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preDisableTableHandler()");
- }
- }
-
- @Override
- public void postDisableTableHandler(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDisableTableHandler()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postDisableTableHandler(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDisableTableHandler()");
- }
- }
-
- @Override
- public void postMove(ObserverContext ctx, HRegionInfo region, ServerName srcServer, ServerName destServer) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postMove()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postMove(ctx, region, srcServer, destServer);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postMove()");
- }
- }
-
- @Override
- public void postAssign(ObserverContext ctx, HRegionInfo regionInfo) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postAssign()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postAssign(ctx, regionInfo);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postAssign()");
- }
- }
-
- @Override
- public void postUnassign(ObserverContext ctx, HRegionInfo regionInfo, boolean force) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postUnassign()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postUnassign(ctx, regionInfo, force);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postUnassign()");
- }
- }
-
- @Override
- public void postRegionOffline(ObserverContext ctx, HRegionInfo regionInfo) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postRegionOffline()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postRegionOffline(ctx, regionInfo);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postRegionOffline()");
- }
- }
-
- @Override
- public void postBalance(ObserverContext ctx, List plans) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postBalance()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postBalance(ctx, plans);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postBalance()");
- }
- }
-
- @Override
- public void postBalanceSwitch(ObserverContext ctx, boolean oldValue, boolean newValue) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postBalanceSwitch()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postBalanceSwitch(ctx, oldValue, newValue);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postBalanceSwitch()");
- }
- }
-
- @Override
- public void preMasterInitialization(ObserverContext ctx) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preMasterInitialization()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preMasterInitialization(ctx);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preMasterInitialization()");
- }
- }
-
- @Override
- public void postSnapshot(ObserverContext ctx, SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postSnapshot(ctx, snapshot, hTableDescriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSnapshot()");
- }
- }
-
- @Override
- public void preListSnapshot(ObserverContext ctx, SnapshotDescription snapshot) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preListSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preListSnapshot(ctx, snapshot);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preListSnapshot()");
- }
- }
-
- @Override
- public void postListSnapshot(ObserverContext ctx, SnapshotDescription snapshot) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postListSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postListSnapshot(ctx, snapshot);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postListSnapshot()");
- }
- }
-
- @Override
- public void postCloneSnapshot(ObserverContext ctx, SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postCloneSnapshot(ctx, snapshot, hTableDescriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
- }
- }
-
- @Override
- public void postRestoreSnapshot(ObserverContext ctx, SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postRestoreSnapshot(ctx, snapshot, hTableDescriptor);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
- }
- }
-
- @Override
- public void postDeleteSnapshot(ObserverContext ctx, SnapshotDescription snapshot) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteSnapshot()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postDeleteSnapshot(ctx, snapshot);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDeleteSnapshot()");
- }
- }
-
- @Override
- public void preGetTableDescriptors(ObserverContext ctx, List tableNamesList, List descriptors) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preGetTableDescriptors()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preGetTableDescriptors(ctx, tableNamesList, descriptors);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preGetTableDescriptors()");
- }
- }
-
- @Override
- public void postGetTableDescriptors(ObserverContext ctx, List descriptors) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postGetTableDescriptors()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postGetTableDescriptors(ctx, descriptors);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postGetTableDescriptors()");
- }
- }
-
- @Override
- public void preGetTableDescriptors(ObserverContext ctx, List tableNamesList, List descriptors, String regex) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preGetTableDescriptors()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preGetTableDescriptors(ctx, tableNamesList, descriptors, regex);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preGetTableDescriptors()");
- }
- }
-
- @Override
- public void preGetTableNames(ObserverContext ctx, List descriptors, String regex) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preGetTableNames()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preGetTableNames(ctx, descriptors, regex);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preGetTableNames()");
- }
- }
-
- @Override
- public void postGetTableNames(ObserverContext ctx, List descriptors, String regex) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postGetTableNames()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postGetTableNames(ctx, descriptors, regex);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postGetTableNames()");
- }
- }
-
@Override
public void postCreateNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException {
if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCreateNamespace()");
+ LOG.debug("==> HBaseAtlasCoprocessor.preCreateNamespace()");
}
try {
@@ -3301,277 +187,78 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
}
if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postCreateNamespace()");
+ LOG.debug("<== HBaseAtlasCoprocessor.preCreateNamespace()");
}
}
@Override
- public void postDeleteNamespace(ObserverContext ctx, String namespace) throws IOException {
+ public void postDeleteNamespace(ObserverContext ctx, String ns) throws IOException {
if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
+ LOG.debug("==> HBaseAtlasCoprocessor.preDeleteNamespace()");
}
try {
activatePluginClassLoader();
- implMasterObserver.postDeleteNamespace(ctx, namespace);
+ implMasterObserver.postDeleteNamespace(ctx, ns);
} finally {
deactivatePluginClassLoader();
}
if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDeleteNamespace()");
+ LOG.debug("<== HBaseAtlasCoprocessor.preDeleteNamespace()");
}
}
-
@Override
public void postModifyNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException {
if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyNamespace()");
+ LOG.debug("==> HBaseAtlasCoprocessor.preModifyNamespace()");
}
try {
activatePluginClassLoader();
- implMasterObserver.postModifyNamespace(ctx, ns);
+ implMasterObserver.preModifyNamespace(ctx, ns);
} finally {
deactivatePluginClassLoader();
}
if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postModifyNamespace()");
+ LOG.debug("<== HBaseAtlasCoprocessor.preModifyNamespace()");
}
}
@Override
- public void preGetNamespaceDescriptor(ObserverContext ctx, String namespace) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preGetNamespaceDescriptor()");
+ public void postCloneSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
}
try {
activatePluginClassLoader();
- implMasterObserver.preGetNamespaceDescriptor(ctx, namespace);
+ implMasterObserver.postCloneSnapshot(observerContext,snapshot,tableDescriptor);
} finally {
deactivatePluginClassLoader();
}
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preGetNamespaceDescriptor()");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
}
}
@Override
- public void postGetNamespaceDescriptor(ObserverContext ctx, NamespaceDescriptor ns) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postGetNamespaceDescriptor()");
+ public void postRestoreSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
}
try {
activatePluginClassLoader();
- implMasterObserver.postGetNamespaceDescriptor(ctx, ns);
+ implMasterObserver.postRestoreSnapshot(observerContext,snapshot,tableDescriptor);
} finally {
deactivatePluginClassLoader();
}
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postGetNamespaceDescriptor()");
- }
- }
-
- @Override
- public void preListNamespaceDescriptors(ObserverContext ctx, List descriptors) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preListNamespaceDescriptors()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preListNamespaceDescriptors(ctx, descriptors);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preListNamespaceDescriptors()");
- }
- }
-
- @Override
- public void postListNamespaceDescriptors(ObserverContext ctx, List descriptors) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postListNamespaceDescriptors()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postListNamespaceDescriptors(ctx, descriptors);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postListNamespaceDescriptors()");
- }
- }
-
- @Override
- public void preTableFlush(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preTableFlush()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.preTableFlush(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preTableFlush()");
- }
- }
-
- @Override
- public void postTableFlush(ObserverContext ctx, TableName tableName) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postTableFlush()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postTableFlush(ctx, tableName);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postTableFlush()");
- }
- }
-
- @Override
- public void postSetUserQuota(ObserverContext ctx, String userName, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSetUserQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postSetUserQuota(ctx, userName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSetUserQuota()");
- }
- }
-
- @Override
- public void postSetUserQuota(ObserverContext ctx, String userName, TableName tableName, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSetUserQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postSetUserQuota(ctx, userName, tableName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSetUserQuota()");
- }
- }
-
- @Override
- public void postSetUserQuota(ObserverContext ctx, String userName, String namespace, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSetUserQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postSetUserQuota(ctx, userName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSetUserQuota()");
- }
- }
-
- @Override
- public void postSetTableQuota(ObserverContext ctx, TableName tableName, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSetTableQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postSetTableQuota(ctx, tableName, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSetTableQuota()");
- }
- }
-
- @Override
- public void postSetNamespaceQuota(ObserverContext ctx, String namespace, Quotas quotas) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postSetNamespaceQuota()");
- }
-
- try {
- activatePluginClassLoader();
- implMasterObserver.postSetNamespaceQuota(ctx, namespace, quotas);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postSetNamespaceQuota()");
- }
- }
-
- @Override
- public void preWALRestore(ObserverContext ctx, HRegionInfo info, HLogKey logKey, WALEdit logEdit) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.preWALRestore()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.preWALRestore(ctx, info, logKey, logEdit);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.preWALRestore()");
- }
- }
-
- @Override
- public void postWALRestore(ObserverContext ctx, HRegionInfo info, HLogKey logKey, WALEdit logEdit) throws IOException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postWALRestore()");
- }
-
- try {
- activatePluginClassLoader();
- implRegionObserver.postWALRestore(ctx, info, logKey, logEdit);
- } finally {
- deactivatePluginClassLoader();
- }
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postWALRestore()");
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
}
}
@@ -3587,17 +274,4 @@ public class HBaseAtlasCoprocessor implements MasterObserver, RegionObserver, Re
}
}
-
-
- // TODO : need override annotations for all of the following methods
- public void preMoveServers(final ObserverContext ctx, Set servers, String targetGroup) throws IOException {}
- public void postMoveServers(ObserverContext ctx, Set servers, String targetGroup) throws IOException {}
- public void preMoveTables(final ObserverContext ctx, Set tables, String targetGroup) throws IOException {}
- public void postMoveTables(final ObserverContext ctx, Set tables, String targetGroup) throws IOException {}
- public void preRemoveRSGroup(final ObserverContext ctx, String name) throws IOException {}
- public void postRemoveRSGroup(final ObserverContext ctx, String name) throws IOException {}
- public void preBalanceRSGroup(final ObserverContext ctx, String groupName) throws IOException {}
- public void postBalanceRSGroup(final ObserverContext ctx, String groupName, boolean balancerRan) throws IOException {}
- public void preAddRSGroup(ObserverContext ctx, String name) throws IOException {}
- public void postAddRSGroup(ObserverContext ctx, String name) throws IOException {}
}
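Every hook in the shim above follows the same shape: activate the plugin classloader, delegate to the observer implementation loaded from the plugin, then restore the caller's classloader in a finally block. The initialization that produces implMasterObserver/implRegionObserver is outside the hunks shown; the sketch below only illustrates how such a delegate can be obtained through the plugin classloader, assuming AtlasPluginClassLoader exposes activate()/deactivate() and can act as the defining classloader. The impl class name is hypothetical. Loading the delegate while the plugin classloader is active keeps its transitive dependencies off the HBase server classpath, which is the point of shipping only a thin shim jar to the master and region servers.

// Minimal sketch, not the project's actual initialization code.
import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;

public final class PluginDelegateFactorySketch {
    private static final String IMPL_CLASS_NAME = "org.apache.atlas.hbase.hook.HBaseAtlasCoprocessorImpl"; // assumed name

    public static MasterObserver createMasterObserverDelegate(AtlasPluginClassLoader pluginClassLoader) throws Exception {
        try {
            pluginClassLoader.activate();                          // make the plugin jars visible to class loading
            Class<?> cls = Class.forName(IMPL_CLASS_NAME, true, pluginClassLoader);
            return (MasterObserver) cls.getDeclaredConstructor().newInstance();
        } finally {
            pluginClassLoader.deactivate();                        // restore the caller's classloader
        }
    }
}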
diff --git a/addons/hbase-bridge/pom.xml b/addons/hbase-bridge/pom.xml
index 82f601001..a33bf30ca 100644
--- a/addons/hbase-bridge/pom.xml
+++ b/addons/hbase-bridge/pom.xml
@@ -31,8 +31,7 @@
jar
- 1.2.1
- 0.9.2-incubating
+ 3.0.3
@@ -51,19 +50,13 @@
org.mortbay.jetty
servlet-api-2.5
+
+ javax.ws.rs
+ *
+
-
- org.apache.atlas
- atlas-client-v1
-
-
-
- org.apache.atlas
- atlas-client-v2
-
-
org.apache.atlas
atlas-notification
@@ -92,11 +85,13 @@
org.apache.hadoop
hadoop-client
+ ${hadoop.version}
org.apache.hadoop
hadoop-hdfs
+ ${hadoop.version}
javax.servlet
@@ -104,6 +99,11 @@
+
+ org.apache.hadoop
+ hadoop-hdfs-client
+ ${hadoop.version}
+
org.apache.hadoop
@@ -165,6 +165,13 @@
+
+ junit
+ junit
+ test
+ 4.12
+
+
org.apache.hbase
hbase-client
@@ -192,7 +199,6 @@
com.google.guava
guava
- 12.0.1
org.apache.hadoop
@@ -213,10 +219,32 @@
compile
- commons-fileupload
- commons-fileupload
- 1.3.3
+ org.apache.atlas
+ atlas-client-v2
+ ${project.version}
+
+ org.apache.hbase
+ hbase-zookeeper
+ test-jar
+ test
+ ${hbase.version}
+
+
+ org.apache.hbase
+ hbase-common
+ test-jar
+ ${hbase.version}
+ test
+
+
+
+
+ org.apache.hbase
+ hbase-testing-util
+ ${hbase.version}
+
+
@@ -245,11 +273,6 @@
${project.artifactId}
${project.version}
-
- ${project.groupId}
- atlas-client-v1
- ${project.version}
-
${project.groupId}
atlas-client-common
@@ -295,11 +318,6 @@
jersey-multipart
${jersey.version}
-
- org.scala-lang
- scala-library
- ${scala.version}
-
com.fasterxml.jackson.core
jackson-databind
@@ -320,11 +338,6 @@
commons-configuration
${commons-conf.version}
-
- org.apache.hbase
- hbase-common
- ${hbase.version}
-
com.sun.jersey
jersey-json
@@ -386,7 +399,6 @@
/
${project.basedir}/../../webapp/src/test/webapp/WEB-INF/web.xml
- ${project.basedir}/../../webapp/target/test-classes/
true
@@ -428,6 +440,18 @@
31001
${jetty-maven-plugin.stopWait}
+
+
+ org.apache.logging.log4j
+ log4j-core
+ 2.8
+
+
+ org.apache.logging.log4j
+ log4j-api
+ 2.8
+
+
start-jetty
@@ -502,7 +526,10 @@
${basedir}/../models
- true
+
+ 0000-Area0/**
+ 1000-Hadoop/**
+
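The hbase-testing-util and test-jar dependencies added above typically support mini-cluster based integration tests. The following is an illustrative sketch of the kind of setup they enable, not a test taken from this module; with the Atlas coprocessor registered, the DDL below would be expected to produce hook notifications.

// Hedged sketch of an HBase mini-cluster exercise using hbase-testing-util.
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class MiniClusterSketch {
    public static void main(String[] args) throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        util.startMiniCluster();                         // in-process HBase + ZooKeeper
        try {
            TableName tableName = TableName.valueOf("t1");
            util.getAdmin().createTable(TableDescriptorBuilder.newBuilder(tableName)
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1"))
                    .build());
            util.getAdmin().disableTable(tableName);
            util.getAdmin().deleteTable(tableName);
        } finally {
            util.shutdownMiniCluster();
        }
    }
}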
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
index e7e918752..1825cd290 100644
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
+++ b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
@@ -31,11 +31,12 @@ import org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV
import org.apache.atlas.type.AtlasTypeUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.configuration.Configuration;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
@@ -45,6 +46,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -72,18 +74,22 @@ public class HBaseAtlasHook extends AtlasHook {
public static final String ATTR_TABLE_MAX_FILESIZE = "maxFileSize";
public static final String ATTR_TABLE_ISREADONLY = "isReadOnly";
public static final String ATTR_TABLE_ISCOMPACTION_ENABLED = "isCompactionEnabled";
+ public static final String ATTR_TABLE_ISNORMALIZATION_ENABLED = "isNormalizationEnabled";
public static final String ATTR_TABLE_REPLICATION_PER_REGION = "replicasPerRegion";
public static final String ATTR_TABLE_DURABLILITY = "durability";
+ public static final String ATTR_TABLE_NORMALIZATION_ENABLED = "isNormalizationEnabled";
// column family additional metadata
public static final String ATTR_CF_BLOOMFILTER_TYPE = "bloomFilterType";
public static final String ATTR_CF_COMPRESSION_TYPE = "compressionType";
public static final String ATTR_CF_COMPACTION_COMPRESSION_TYPE = "compactionCompressionType";
public static final String ATTR_CF_ENCRYPTION_TYPE = "encryptionType";
+ public static final String ATTR_CF_INMEMORY_COMPACTION_POLICY = "inMemoryCompactionPolicy";
public static final String ATTR_CF_KEEP_DELETE_CELLS = "keepDeletedCells";
public static final String ATTR_CF_MAX_VERSIONS = "maxVersions";
public static final String ATTR_CF_MIN_VERSIONS = "minVersions";
public static final String ATTR_CF_DATA_BLOCK_ENCODING = "dataBlockEncoding";
+ public static final String ATTR_CF_STORAGE_POLICY = "StoragePolicy";
public static final String ATTR_CF_TTL = "ttl";
public static final String ATTR_CF_BLOCK_CACHE_ENABLED = "blockCacheEnabled";
public static final String ATTR_CF_CACHED_BLOOM_ON_WRITE = "cacheBloomsOnWrite";
@@ -91,6 +97,9 @@ public class HBaseAtlasHook extends AtlasHook {
public static final String ATTR_CF_CACHED_INDEXES_ON_WRITE = "cacheIndexesOnWrite";
public static final String ATTR_CF_EVICT_BLOCK_ONCLOSE = "evictBlocksOnClose";
public static final String ATTR_CF_PREFETCH_BLOCK_ONOPEN = "prefetchBlocksOnOpen";
+ public static final String ATTR_CF_NEW_VERSION_BEHAVIOR = "newVersionBehavior";
+ public static final String ATTR_CF_MOB_ENABLED = "isMobEnabled";
+ public static final String ATTR_CF_MOB_COMPACTPARTITION_POLICY = "mobCompactPartitionPolicy";
public static final String HBASE_NAMESPACE_QUALIFIED_NAME = "%s@%s";
public static final String HBASE_TABLE_QUALIFIED_NAME_FORMAT = "%s:%s@%s";
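For reference, a small worked example of how the two qualified-name format strings above compose; the argument order (namespace, table, cluster) is an assumption, since the helper methods that apply them are not part of the hunks shown here.

// Illustrative only; argument order is assumed.
public class QualifiedNameSketch {
    public static void main(String[] args) {
        String clusterName = "cl1";
        String nameSpace   = "default";
        String tableName   = "t1";

        String nsQualifiedName    = String.format("%s@%s", nameSpace, clusterName);               // default@cl1
        String tableQualifiedName = String.format("%s:%s@%s", nameSpace, tableName, clusterName); // default:t1@cl1

        System.out.println(nsQualifiedName);
        System.out.println(tableQualifiedName);
    }
}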
@@ -153,7 +162,7 @@ public class HBaseAtlasHook extends AtlasHook {
public void createAtlasInstances(HBaseOperationContext hbaseOperationContext) {
- HBaseAtlasHook.OPERATION operation = hbaseOperationContext.getOperation();
+ OPERATION operation = hbaseOperationContext.getOperation();
LOG.info("HBaseAtlasHook(operation={})", operation);
@@ -396,13 +405,15 @@ public class HBaseAtlasHook extends AtlasHook {
table.setAttribute(ATTR_PARAMETERS, hbaseOperationContext.getHbaseConf());
table.setAttribute(ATTR_NAMESPACE, AtlasTypeUtil.getAtlasObjectId(nameSpace));
- HTableDescriptor htableDescriptor = hbaseOperationContext.gethTableDescriptor();
- if (htableDescriptor != null) {
- table.setAttribute(ATTR_TABLE_MAX_FILESIZE, htableDescriptor.getMaxFileSize());
- table.setAttribute(ATTR_TABLE_REPLICATION_PER_REGION, htableDescriptor.getRegionReplication());
- table.setAttribute(ATTR_TABLE_ISREADONLY, htableDescriptor.isReadOnly());
- table.setAttribute(ATTR_TABLE_ISCOMPACTION_ENABLED, htableDescriptor.isCompactionEnabled());
- table.setAttribute(ATTR_TABLE_DURABLILITY, (htableDescriptor.getDurability() != null ? htableDescriptor.getDurability().name() : null));
+ TableDescriptor tableDescriptor = hbaseOperationContext.gethTableDescriptor();
+ if (tableDescriptor != null) {
+ table.setAttribute(ATTR_TABLE_MAX_FILESIZE, tableDescriptor.getMaxFileSize());
+ table.setAttribute(ATTR_TABLE_REPLICATION_PER_REGION, tableDescriptor.getRegionReplication());
+ table.setAttribute(ATTR_TABLE_ISREADONLY, tableDescriptor.isReadOnly());
+ table.setAttribute(ATTR_TABLE_ISNORMALIZATION_ENABLED, tableDescriptor.isNormalizationEnabled());
+ table.setAttribute(ATTR_TABLE_ISCOMPACTION_ENABLED, tableDescriptor.isCompactionEnabled());
+ table.setAttribute(ATTR_TABLE_DURABLILITY, (tableDescriptor.getDurability() != null ? tableDescriptor.getDurability().name() : null));
+ table.setAttribute(ATTR_TABLE_NORMALIZATION_ENABLED, tableDescriptor.isNormalizationEnabled());
}
switch (operation) {
@@ -426,11 +437,11 @@ public class HBaseAtlasHook extends AtlasHook {
private List buildColumnFamilies(HBaseOperationContext hbaseOperationContext, AtlasEntity nameSpace, AtlasEntity table) {
List columnFamilies = new ArrayList<>();
- HColumnDescriptor[] hColumnDescriptors = hbaseOperationContext.gethColumnDescriptors();
+ ColumnFamilyDescriptor[] columnFamilyDescriptors = hbaseOperationContext.gethColumnDescriptors();
- if (hColumnDescriptors != null) {
- for (HColumnDescriptor hColumnDescriptor : hColumnDescriptors) {
- AtlasEntity columnFamily = buildColumnFamily(hbaseOperationContext, hColumnDescriptor, nameSpace, table);
+ if (columnFamilyDescriptors != null) {
+ for (ColumnFamilyDescriptor columnFamilyDescriptor : columnFamilyDescriptors) {
+ AtlasEntity columnFamily = buildColumnFamily(hbaseOperationContext, columnFamilyDescriptor, nameSpace, table);
columnFamilies.add(columnFamily);
}
@@ -439,9 +450,9 @@ public class HBaseAtlasHook extends AtlasHook {
return columnFamilies;
}
- private AtlasEntity buildColumnFamily(HBaseOperationContext hbaseOperationContext, HColumnDescriptor hColumnDescriptor, AtlasEntity nameSpace, AtlasEntity table) {
+ private AtlasEntity buildColumnFamily(HBaseOperationContext hbaseOperationContext, ColumnFamilyDescriptor columnFamilyDescriptor, AtlasEntity nameSpace, AtlasEntity table) {
AtlasEntity columnFamily = new AtlasEntity(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName());
- String columnFamilyName = hColumnDescriptor.getNameAsString();
+ String columnFamilyName = columnFamilyDescriptor.getNameAsString();
String tableName = (String) table.getAttribute(ATTR_NAME);
String nameSpaceName = (String) nameSpace.getAttribute(ATTR_NAME);
String columnFamilyQName = getColumnFamilyQualifiedName(clusterName, nameSpaceName, tableName, columnFamilyName);
@@ -453,22 +464,27 @@ public class HBaseAtlasHook extends AtlasHook {
columnFamily.setAttribute(ATTR_OWNER, hbaseOperationContext.getOwner());
columnFamily.setAttribute(ATTR_TABLE, AtlasTypeUtil.getAtlasObjectId(table));
- if (hColumnDescriptor!= null) {
- columnFamily.setAttribute(ATTR_CF_BLOCK_CACHE_ENABLED, hColumnDescriptor.isBlockCacheEnabled());
- columnFamily.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, (hColumnDescriptor.getBloomFilterType() != null ? hColumnDescriptor.getBloomFilterType().name():null));
- columnFamily.setAttribute(ATTR_CF_CACHED_BLOOM_ON_WRITE, hColumnDescriptor.isCacheBloomsOnWrite());
- columnFamily.setAttribute(ATTR_CF_CACHED_DATA_ON_WRITE, hColumnDescriptor.isCacheDataOnWrite());
- columnFamily.setAttribute(ATTR_CF_CACHED_INDEXES_ON_WRITE, hColumnDescriptor.isCacheIndexesOnWrite());
- columnFamily.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, (hColumnDescriptor.getCompactionCompressionType() != null ? hColumnDescriptor.getCompactionCompressionType().name():null));
- columnFamily.setAttribute(ATTR_CF_COMPRESSION_TYPE, (hColumnDescriptor.getCompressionType() != null ? hColumnDescriptor.getCompressionType().name():null));
- columnFamily.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, (hColumnDescriptor.getDataBlockEncoding() != null ? hColumnDescriptor.getDataBlockEncoding().name():null));
- columnFamily.setAttribute(ATTR_CF_ENCRYPTION_TYPE, hColumnDescriptor.getEncryptionType());
- columnFamily.setAttribute(ATTR_CF_EVICT_BLOCK_ONCLOSE, hColumnDescriptor.isEvictBlocksOnClose());
- columnFamily.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, ( hColumnDescriptor.getKeepDeletedCells() != null ? hColumnDescriptor.getKeepDeletedCells().name():null));
- columnFamily.setAttribute(ATTR_CF_MAX_VERSIONS, hColumnDescriptor.getMaxVersions());
- columnFamily.setAttribute(ATTR_CF_MIN_VERSIONS, hColumnDescriptor.getMinVersions());
- columnFamily.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, hColumnDescriptor.isPrefetchBlocksOnOpen());
- columnFamily.setAttribute(ATTR_CF_TTL, hColumnDescriptor.getTimeToLive());
+ if (columnFamilyDescriptor!= null) {
+ columnFamily.setAttribute(ATTR_CF_BLOCK_CACHE_ENABLED, columnFamilyDescriptor.isBlockCacheEnabled());
+ columnFamily.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, (columnFamilyDescriptor.getBloomFilterType() != null ? columnFamilyDescriptor.getBloomFilterType().name():null));
+ columnFamily.setAttribute(ATTR_CF_CACHED_BLOOM_ON_WRITE, columnFamilyDescriptor.isCacheBloomsOnWrite());
+ columnFamily.setAttribute(ATTR_CF_CACHED_DATA_ON_WRITE, columnFamilyDescriptor.isCacheDataOnWrite());
+ columnFamily.setAttribute(ATTR_CF_CACHED_INDEXES_ON_WRITE, columnFamilyDescriptor.isCacheIndexesOnWrite());
+ columnFamily.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, (columnFamilyDescriptor.getCompactionCompressionType() != null ? columnFamilyDescriptor.getCompactionCompressionType().name():null));
+ columnFamily.setAttribute(ATTR_CF_COMPRESSION_TYPE, (columnFamilyDescriptor.getCompressionType() != null ? columnFamilyDescriptor.getCompressionType().name():null));
+ columnFamily.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, (columnFamilyDescriptor.getDataBlockEncoding() != null ? columnFamilyDescriptor.getDataBlockEncoding().name():null));
+ columnFamily.setAttribute(ATTR_CF_ENCRYPTION_TYPE, columnFamilyDescriptor.getEncryptionType());
+ columnFamily.setAttribute(ATTR_CF_EVICT_BLOCK_ONCLOSE, columnFamilyDescriptor.isEvictBlocksOnClose());
+ columnFamily.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, (columnFamilyDescriptor.getInMemoryCompaction() != null ? columnFamilyDescriptor.getInMemoryCompaction().name():null));
+ columnFamily.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, ( columnFamilyDescriptor.getKeepDeletedCells() != null ? columnFamilyDescriptor.getKeepDeletedCells().name():null));
+ columnFamily.setAttribute(ATTR_CF_MAX_VERSIONS, columnFamilyDescriptor.getMaxVersions());
+ columnFamily.setAttribute(ATTR_CF_MIN_VERSIONS, columnFamilyDescriptor.getMinVersions());
+ columnFamily.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR, columnFamilyDescriptor.isNewVersionBehavior());
+ columnFamily.setAttribute(ATTR_CF_MOB_ENABLED, columnFamilyDescriptor.isMobEnabled());
+ columnFamily.setAttribute(ATTR_CF_MOB_COMPACTPARTITION_POLICY, ( columnFamilyDescriptor.getMobCompactPartitionPolicy() != null ? columnFamilyDescriptor.getMobCompactPartitionPolicy().name():null));
+ columnFamily.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, columnFamilyDescriptor.isPrefetchBlocksOnOpen());
+ columnFamily.setAttribute(ATTR_CF_STORAGE_POLICY, columnFamilyDescriptor.getStoragePolicy());
+ columnFamily.setAttribute(ATTR_CF_TTL, columnFamilyDescriptor.getTimeToLive());
}
switch (hbaseOperationContext.getOperation()) {
@@ -497,21 +513,24 @@ public class HBaseAtlasHook extends AtlasHook {
if (tableName != null) {
ret = tableName.getNameAsString();
} else {
- HTableDescriptor tableDescriptor = hbaseOperationContext.gethTableDescriptor();
+ TableDescriptor tableDescriptor = hbaseOperationContext.gethTableDescriptor();
- ret = (tableDescriptor != null) ? tableDescriptor.getNameAsString() : null;
+ ret = (tableDescriptor != null) ? tableDescriptor.getTableName().getNameAsString() : null;
}
return ret;
}
- public void sendHBaseNameSpaceOperation(final NamespaceDescriptor namespaceDescriptor, final String nameSpace, final OPERATION operation) {
+ public void sendHBaseNameSpaceOperation(final NamespaceDescriptor namespaceDescriptor, final String nameSpace, final OPERATION operation, ObserverContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasHook.sendHBaseNameSpaceOperation()");
}
try {
- HBaseOperationContext hbaseOperationContext = handleHBaseNameSpaceOperation(namespaceDescriptor, nameSpace, operation);
+ final UserGroupInformation ugi = getUGI(ctx);
+ final User user = getActiveUser(ctx);
+ final String userName = (user != null) ? user.getShortName() : null;
+ HBaseOperationContext hbaseOperationContext = handleHBaseNameSpaceOperation(namespaceDescriptor, nameSpace, operation, ugi, userName);
sendNotification(hbaseOperationContext);
} catch (Throwable t) {
@@ -523,13 +542,16 @@ public class HBaseAtlasHook extends AtlasHook {
}
}
- public void sendHBaseTableOperation(final HTableDescriptor hTableDescriptor, final TableName tableName, final OPERATION operation) {
+ public void sendHBaseTableOperation(TableDescriptor tableDescriptor, final TableName tableName, final OPERATION operation, ObserverContext ctx) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasHook.sendHBaseTableOperation()");
}
try {
- HBaseOperationContext hbaseOperationContext = handleHBaseTableOperation(hTableDescriptor, tableName, operation);
+ final UserGroupInformation ugi = getUGI(ctx);
+ final User user = getActiveUser(ctx);
+ final String userName = (user != null) ? user.getShortName() : null;
+ HBaseOperationContext hbaseOperationContext = handleHBaseTableOperation(tableDescriptor, tableName, operation, ugi, userName);
sendNotification(hbaseOperationContext);
} catch (Throwable t) {
@@ -541,24 +563,6 @@ public class HBaseAtlasHook extends AtlasHook {
}
}
- public void sendHBaseColumnFamilyOperation(final HColumnDescriptor hColumnDescriptor, final TableName tableName, final String columnFamily, final OPERATION operation) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasHook.sendHBaseColumnFamilyOperation()");
- }
-
- try {
- HBaseOperationContext hbaseOperationContext = handleHBaseColumnFamilyOperation(hColumnDescriptor, tableName, columnFamily, operation);
-
- sendNotification(hbaseOperationContext);
- } catch (Throwable t) {
- LOG.error("<== HBaseAtlasHook.sendHBaseColumnFamilyOperation(): failed to send notification", t);
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasHook.sendHBaseColumnFamilyOperation()");
- }
- }
-
private void sendNotification(HBaseOperationContext hbaseOperationContext) {
UserGroupInformation ugi = hbaseOperationContext.getUgi();
@@ -569,15 +573,11 @@ public class HBaseAtlasHook extends AtlasHook {
notifyEntities(hbaseOperationContext.getMessages(), ugi);
}
- private HBaseOperationContext handleHBaseNameSpaceOperation(NamespaceDescriptor namespaceDescriptor, String nameSpace, OPERATION operation) {
+ private HBaseOperationContext handleHBaseNameSpaceOperation(NamespaceDescriptor namespaceDescriptor, String nameSpace, OPERATION operation, UserGroupInformation ugi, String userName) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasHook.handleHBaseNameSpaceOperation()");
}
- UserGroupInformation ugi = getUGI();
- User user = getActiveUser();
- String userName = (user != null) ? user.getShortName() : null;
-
HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(namespaceDescriptor, nameSpace, operation, ugi, userName, userName);
createAtlasInstances(hbaseOperationContext);
@@ -588,24 +588,21 @@ public class HBaseAtlasHook extends AtlasHook {
return hbaseOperationContext;
}
- private HBaseOperationContext handleHBaseTableOperation(HTableDescriptor hTableDescriptor, TableName tableName, OPERATION operation) {
+ private HBaseOperationContext handleHBaseTableOperation(TableDescriptor tableDescriptor, TableName tableName, OPERATION operation, UserGroupInformation ugi, String userName) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasHook.handleHBaseTableOperation()");
}
- UserGroupInformation ugi = getUGI();
- User user = getActiveUser();
- String userName = (user != null) ? user.getShortName() : null;
Map<String, String> hbaseConf = null;
String owner = null;
String tableNameSpace = null;
TableName hbaseTableName = null;
- HColumnDescriptor[] hColumnDescriptors = null;
+ ColumnFamilyDescriptor[] columnFamilyDescriptors = null;
- if (hTableDescriptor != null) {
- owner = hTableDescriptor.getOwnerString();
- hbaseConf = hTableDescriptor.getConfiguration();
- hbaseTableName = hTableDescriptor.getTableName();
+ if (tableDescriptor != null) {
+ owner = tableDescriptor.getOwnerString();
+ hbaseConf = null;
+ hbaseTableName = tableDescriptor.getTableName();
if (hbaseTableName != null) {
tableNameSpace = hbaseTableName.getNamespaceAsString();
if (tableNameSpace == null) {
@@ -618,11 +615,11 @@ public class HBaseAtlasHook extends AtlasHook {
owner = userName;
}
- if (hTableDescriptor != null) {
- hColumnDescriptors = hTableDescriptor.getColumnFamilies();
+ if (tableDescriptor != null) {
+ columnFamilyDescriptors = tableDescriptor.getColumnFamilies();
}
- HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace, hTableDescriptor, tableName, hColumnDescriptors, operation, ugi, userName, owner, hbaseConf);
+ HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace, tableDescriptor, tableName, columnFamilyDescriptors, operation, ugi, userName, owner, hbaseConf);
createAtlasInstances(hbaseOperationContext);
if (LOG.isDebugEnabled()) {
@@ -631,27 +628,24 @@ public class HBaseAtlasHook extends AtlasHook {
return hbaseOperationContext;
}
- private HBaseOperationContext handleHBaseColumnFamilyOperation(HColumnDescriptor hColumnDescriptor, TableName tableName, String columnFamily, OPERATION operation) {
+ private HBaseOperationContext handleHBaseColumnFamilyOperation(ColumnFamilyDescriptor columnFamilyDescriptor, TableName tableName, String columnFamily, OPERATION operation, UserGroupInformation ugi, String userName) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasHook.handleHBaseColumnFamilyOperation()");
}
- UserGroupInformation ugi = getUGI();
- User user = getActiveUser();
- String userName = (user != null) ? user.getShortName() : null;
String owner = userName;
- Map<String, String> hbaseConf = null;
+ Map<String, String> hbaseConf = new HashMap<>();
String tableNameSpace = tableName.getNamespaceAsString();
if (tableNameSpace == null) {
tableNameSpace = tableName.getNameWithNamespaceInclAsString();
}
- if (hColumnDescriptor != null) {
- hbaseConf = hColumnDescriptor.getConfiguration();
+ if (columnFamilyDescriptor != null) {
+ hbaseConf = columnFamilyDescriptor.getConfiguration();
}
- HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace, tableName, hColumnDescriptor, columnFamily, operation, ugi, userName, owner, hbaseConf);
+ HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace, tableName, columnFamilyDescriptor, columnFamily, operation, ugi, userName, owner, hbaseConf);
createAtlasInstances(hbaseOperationContext);
if (LOG.isDebugEnabled()) {
@@ -660,26 +654,12 @@ public class HBaseAtlasHook extends AtlasHook {
return hbaseOperationContext;
}
- private User getActiveUser() {
- User user = RpcServer.getRequestUser();
- if (user == null) {
- // for non-rpc handling, fallback to system user
- try {
- user = User.getCurrent();
- } catch (IOException e) {
- LOG.error("Unable to find the current user");
- user = null;
- }
- }
- return user;
- }
-
- private UserGroupInformation getUGI() {
+ private UserGroupInformation getUGI(ObserverContext<?> ctx) {
UserGroupInformation ugi = null;
- User user = getActiveUser();
-
+ User user = null;
try {
- ugi = UserGroupInformation.getLoginUser();
+ user = getActiveUser(ctx);
+ ugi = UserGroupInformation.getLoginUser();
} catch (Exception e) {
// not setting the UGI here
}
@@ -693,4 +673,8 @@ public class HBaseAtlasHook extends AtlasHook {
LOG.info("HBaseAtlasHook: UGI: {}", ugi);
return ugi;
}
+
+ private User getActiveUser(ObserverContext<?> ctx) throws IOException {
+ return (User)ctx.getCaller().orElse(User.getCurrent());
+ }
}
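The hook now resolves the acting user from the coprocessor's ObserverContext instead of from RpcServer. The standalone sketch below is not part of the patch; the class name and helper methods are illustrative. It shows the HBase 2.x pattern the new getActiveUser() and getUGI() methods rely on: ObserverContext.getCaller() yields the RPC caller when present, with User.getCurrent() as the non-RPC fallback.

import java.io.IOException;
import java.util.Optional;

import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;

public class CallerResolutionSketch {
    // HBase 2.x exposes the requesting user on the ObserverContext itself;
    // User.getCurrent() covers non-RPC invocations, as in getActiveUser() above.
    static User resolveUser(ObserverContext<?> ctx) throws IOException {
        Optional<User> caller = ctx.getCaller();

        return caller.isPresent() ? caller.get() : User.getCurrent();
    }

    // Notifications are still sent as the login user; the short name of the
    // resolved caller becomes the owner, as in handleHBaseTableOperation().
    static UserGroupInformation loginUgiOrNull() {
        try {
            return UserGroupInformation.getLoginUser();
        } catch (IOException e) {
            return null; // the hook tolerates a missing UGI
        }
    }
}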
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
index 8372f0261..17d617d19 100644
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
+++ b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
@@ -39,10 +39,14 @@ import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -84,6 +88,7 @@ public class HBaseBridge {
private static final String ATTR_TABLE_ISCOMPACTION_ENABLED = "isCompactionEnabled";
private static final String ATTR_TABLE_REPLICATION_PER_REGION = "replicasPerRegion";
private static final String ATTR_TABLE_DURABLILITY = "durability";
+ private static final String ATTR_TABLE_NORMALIZATION_ENABLED = "isNormalizationEnabled";
// column family metadata
private static final String ATTR_CF_BLOOMFILTER_TYPE = "bloomFilterType";
@@ -102,6 +107,10 @@ public class HBaseBridge {
private static final String ATTR_CF_EVICT_BLOCK_ONCLOSE = "evictBlocksOnClose";
private static final String ATTR_CF_PREFETCH_BLOCK_ONOPEN = "prefetchBlocksOnOpen";
private static final String ATTRIBUTE_QUALIFIED_NAME = "qualifiedName";
+ private static final String ATTR_CF_INMEMORY_COMPACTION_POLICY = "inMemoryCompactionPolicy";
+ private static final String ATTR_CF_MOB_COMPATCTPARTITION_POLICY = "mobCompactPartitionPolicy";
+ private static final String ATTR_CF_MOB_ENABLED = "isMobEnabled";
+ private static final String ATTR_CF_NEW_VERSION_BEHAVIOR = "newVersionBehavior";
private static final String HBASE_NAMESPACE_QUALIFIED_NAME = "%s@%s";
private static final String HBASE_TABLE_QUALIFIED_NAME_FORMAT = "%s:%s@%s";
@@ -109,7 +118,7 @@ public class HBaseBridge {
private final String clusterName;
private final AtlasClientV2 atlasClientV2;
- private final HBaseAdmin hbaseAdmin;
+ private final Admin hbaseAdmin;
public static void main(String[] args) {
@@ -199,11 +208,13 @@ public class HBaseBridge {
LOG.info("checking HBase availability..");
- HBaseAdmin.checkHBaseAvailable(conf);
+ HBaseAdmin.available(conf);
LOG.info("HBase is available");
- hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = ConnectionFactory.createConnection(conf);
+
+ hbaseAdmin = conn.getAdmin();
}
private boolean importHBaseEntities(String namespaceToImport, String tableToImport) throws Exception {
@@ -238,11 +249,11 @@ public class HBaseBridge {
}
public void importTable(final String tableName) throws Exception {
- String tableNameStr = null;
- HTableDescriptor[] htds = hbaseAdmin.listTables(Pattern.compile(tableName));
+ String tableNameStr = null;
+ TableDescriptor[] htds = hbaseAdmin.listTables(Pattern.compile(tableName));
if (ArrayUtils.isNotEmpty(htds)) {
- for (HTableDescriptor htd : htds) {
+ for (TableDescriptor htd : htds) {
String tblNameWithNameSpace = htd.getTableName().getNameWithNamespaceInclAsString();
String tblNameWithOutNameSpace = htd.getTableName().getNameAsString();
@@ -263,7 +274,7 @@ public class HBaseBridge {
String nsName = new String(nsByte);
NamespaceDescriptor nsDescriptor = hbaseAdmin.getNamespaceDescriptor(nsName);
AtlasEntityWithExtInfo entity = createOrUpdateNameSpace(nsDescriptor);
- HColumnDescriptor[] hcdts = htd.getColumnFamilies();
+ ColumnFamilyDescriptor[] hcdts = htd.getColumnFamilies();
createOrUpdateTable(nsName, tableNameStr, entity.getEntity(), htd, hcdts);
}
@@ -283,11 +294,11 @@ public class HBaseBridge {
}
}
- HTableDescriptor[] htds = hbaseAdmin.listTables();
+ TableDescriptor[] htds = hbaseAdmin.listTables();
if (ArrayUtils.isNotEmpty(htds)) {
- for (HTableDescriptor htd : htds) {
- String tableName = htd.getNameAsString();
+ for (TableDescriptor htd : htds) {
+ String tableName = htd.getTableName().getNameAsString();
importTable(tableName);
}
@@ -297,7 +308,7 @@ public class HBaseBridge {
private void importNameSpaceWithTable(String namespaceToImport, String tableToImport) throws Exception {
importNameSpace(namespaceToImport);
- List<HTableDescriptor> hTableDescriptors = new ArrayList<>();
+ List<TableDescriptor> hTableDescriptors = new ArrayList<>();
if (StringUtils.isEmpty(tableToImport)) {
List<NamespaceDescriptor> matchingNameSpaceDescriptors = getMatchingNameSpaces(namespaceToImport);
@@ -308,13 +319,13 @@ public class HBaseBridge {
} else {
tableToImport = namespaceToImport +":" + tableToImport;
- HTableDescriptor[] htds = hbaseAdmin.listTables(Pattern.compile(tableToImport));
+ TableDescriptor[] htds = hbaseAdmin.listTables(Pattern.compile(tableToImport));
hTableDescriptors.addAll(Arrays.asList(htds));
}
if (CollectionUtils.isNotEmpty(hTableDescriptors)) {
- for (HTableDescriptor htd : hTableDescriptors) {
+ for (TableDescriptor htd : hTableDescriptors) {
String tblName = htd.getTableName().getNameAsString();
importTable(tblName);
@@ -339,11 +350,11 @@ public class HBaseBridge {
return ret;
}
- private List<HTableDescriptor> getTableDescriptors(List<NamespaceDescriptor> namespaceDescriptors) throws Exception {
- List<HTableDescriptor> ret = new ArrayList<>();
+ private List<TableDescriptor> getTableDescriptors(List<NamespaceDescriptor> namespaceDescriptors) throws Exception {
+ List<TableDescriptor> ret = new ArrayList<>();
for(NamespaceDescriptor namespaceDescriptor:namespaceDescriptors) {
- HTableDescriptor[] tableDescriptors = hbaseAdmin.listTableDescriptorsByNamespace(namespaceDescriptor.getName());
+ TableDescriptor[] tableDescriptors = hbaseAdmin.listTableDescriptorsByNamespace(namespaceDescriptor.getName());
ret.addAll(Arrays.asList(tableDescriptors));
}
@@ -374,7 +385,7 @@ public class HBaseBridge {
return nsEntity;
}
- protected AtlasEntityWithExtInfo createOrUpdateTable(String nameSpace, String tableName, AtlasEntity nameSapceEntity, HTableDescriptor htd, HColumnDescriptor[] hcdts) throws Exception {
+ protected AtlasEntityWithExtInfo createOrUpdateTable(String nameSpace, String tableName, AtlasEntity nameSapceEntity, TableDescriptor htd, ColumnFamilyDescriptor[] hcdts) throws Exception {
String owner = htd.getOwnerString();
String tblQualifiedName = getTableQualifiedName(clusterName, nameSpace, tableName);
AtlasEntityWithExtInfo ret = findTableEntityInAtlas(tblQualifiedName);
@@ -414,13 +425,13 @@ public class HBaseBridge {
return ret;
}
- protected List createOrUpdateColumnFamilies(String nameSpace, String tableName, String owner, HColumnDescriptor[] hcdts , AtlasEntity tableEntity) throws Exception {
+ protected List createOrUpdateColumnFamilies(String nameSpace, String tableName, String owner, ColumnFamilyDescriptor[] hcdts , AtlasEntity tableEntity) throws Exception {
List ret = new ArrayList<>();
if (hcdts != null) {
AtlasObjectId tableId = AtlasTypeUtil.getAtlasObjectId(tableEntity);
- for (HColumnDescriptor columnFamilyDescriptor : hcdts) {
+ for (ColumnFamilyDescriptor columnFamilyDescriptor : hcdts) {
String cfName = columnFamilyDescriptor.getNameAsString();
String cfQualifiedName = getColumnFamilyQualifiedName(clusterName, nameSpace, tableName, cfName);
AtlasEntityWithExtInfo cfEntity = findColumnFamiltyEntityInAtlas(cfQualifiedName);
@@ -512,7 +523,7 @@ public class HBaseBridge {
return ret;
}
- private AtlasEntity getTableEntity(String nameSpace, String tableName, String owner, AtlasEntity nameSpaceEntity, HTableDescriptor htd, AtlasEntity atlasEntity) {
+ private AtlasEntity getTableEntity(String nameSpace, String tableName, String owner, AtlasEntity nameSpaceEntity, TableDescriptor htd, AtlasEntity atlasEntity) {
AtlasEntity ret = null;
if (atlasEntity == null) {
@@ -535,11 +546,12 @@ public class HBaseBridge {
ret.setAttribute(ATTR_TABLE_ISREADONLY, htd.isReadOnly());
ret.setAttribute(ATTR_TABLE_ISCOMPACTION_ENABLED, htd.isCompactionEnabled());
ret.setAttribute(ATTR_TABLE_DURABLILITY, (htd.getDurability() != null ? htd.getDurability().name() : null));
+ ret.setAttribute(ATTR_TABLE_NORMALIZATION_ENABLED, htd.isNormalizationEnabled());
return ret;
}
- private AtlasEntity getColumnFamilyEntity(String nameSpace, String tableName, String owner, HColumnDescriptor hcdt, AtlasObjectId tableId, AtlasEntity atlasEntity){
+ private AtlasEntity getColumnFamilyEntity(String nameSpace, String tableName, String owner, ColumnFamilyDescriptor hcdt, AtlasObjectId tableId, AtlasEntity atlasEntity){
AtlasEntity ret = null;
if (atlasEntity == null) {
@@ -572,6 +584,10 @@ public class HBaseBridge {
ret.setAttribute(ATTR_CF_MIN_VERSIONS, hcdt.getMinVersions());
ret.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, hcdt.isPrefetchBlocksOnOpen());
ret.setAttribute(ATTR_CF_TTL, hcdt.getTimeToLive());
+ ret.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, (hcdt.getInMemoryCompaction() != null ? hcdt.getInMemoryCompaction().name():null));
+ ret.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, ( hcdt.getMobCompactPartitionPolicy() != null ? hcdt.getMobCompactPartitionPolicy().name():null));
+ ret.setAttribute(ATTR_CF_MOB_ENABLED,hcdt.isMobEnabled());
+ ret.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR,hcdt.isNewVersionBehavior());
return ret;
}
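HBaseBridge now obtains an Admin from a Connection rather than constructing HBaseAdmin directly, and it works against the TableDescriptor and ColumnFamilyDescriptor interfaces. A minimal sketch of that HBase 2.x client usage follows; it is not part of the patch, and the wildcard table pattern and printed attributes are illustrative.

import java.io.IOException;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class HBase2ClientSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();

        // ConnectionFactory/Connection replace the removed HBaseAdmin(conf) constructor.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            // listTables(Pattern) yields descriptors usable as TableDescriptor,
            // mirroring importTable() above.
            for (TableDescriptor htd : admin.listTables(Pattern.compile(".*"))) {
                System.out.println(htd.getTableName().getNameAsString());

                for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) {
                    System.out.println("  cf=" + cfd.getNameAsString() + " ttl=" + cfd.getTimeToLive());
                }
            }
        }
    }
}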
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
index af8afd4e2..313132de6 100644
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
+++ b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
@@ -20,21 +20,24 @@ package org.apache.atlas.hbase.hook;
import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
-public class HBaseAtlasCoprocessor extends HBaseAtlasCoprocessorBase {
+public class HBaseAtlasCoprocessor implements MasterCoprocessor, MasterObserver, RegionObserver, RegionServerObserver {
private static final Logger LOG = LoggerFactory.getLogger(HBaseAtlasCoprocessor.class);
final HBaseAtlasHook hbaseAtlasHook;
@@ -44,81 +47,38 @@ public class HBaseAtlasCoprocessor extends HBaseAtlasCoprocessorBase {
}
@Override
- public void postCreateTable(ObserverContext observerContext, HTableDescriptor hTableDescriptor, HRegionInfo[] hRegionInfos) throws IOException {
+ public void postCreateTable(ObserverContext observerContext, TableDescriptor tableDescriptor, RegionInfo[] hRegionInfos) throws IOException {
+ LOG.info("==> HBaseAtlasCoprocessor.postCreateTable()");
+
+ hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE, observerContext);
if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessoror.postCreateTable()");
- }
- hbaseAtlasHook.sendHBaseTableOperation(hTableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE);
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessoror.postCreateTable()");
+ LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
}
}
@Override
public void postDeleteTable(ObserverContext observerContext, TableName tableName) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTable()");
- }
- hbaseAtlasHook.sendHBaseTableOperation(null, tableName, HBaseAtlasHook.OPERATION.DELETE_TABLE);
+ LOG.info("==> HBaseAtlasCoprocessor.postDeleteTable()");
+ hbaseAtlasHook.sendHBaseTableOperation(null, tableName, HBaseAtlasHook.OPERATION.DELETE_TABLE, observerContext);
if (LOG.isDebugEnabled()) {
LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()");
}
}
@Override
- public void postModifyTable(ObserverContext observerContext, TableName tableName, HTableDescriptor hTableDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
- }
- hbaseAtlasHook.sendHBaseTableOperation(hTableDescriptor, tableName, HBaseAtlasHook.OPERATION.ALTER_TABLE);
+ public void postModifyTable(ObserverContext observerContext, TableName tableName, TableDescriptor tableDescriptor) throws IOException {
+ LOG.info("==> HBaseAtlasCoprocessor.postModifyTable()");
+ hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, tableName, HBaseAtlasHook.OPERATION.ALTER_TABLE, observerContext);
if (LOG.isDebugEnabled()) {
LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
}
}
- @Override
- public void postAddColumn(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postAddColumn()");
- }
- hbaseAtlasHook.sendHBaseColumnFamilyOperation(hColumnDescriptor, tableName, null, HBaseAtlasHook.OPERATION.CREATE_COLUMN_FAMILY);
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postAddColumn()");
- }
- }
-
- @Override
- public void postModifyColumn(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyColumn()");
- }
- hbaseAtlasHook.sendHBaseColumnFamilyOperation(hColumnDescriptor, tableName, null, HBaseAtlasHook.OPERATION.ALTER_COLUMN_FAMILY);
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postModifyColumn()");
- }
- }
-
- @Override
- public void postDeleteColumn(ObserverContext observerContext, TableName tableName, byte[] bytes) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteColumn()");
- }
-
- String columnFamily = Bytes.toString(bytes);
- hbaseAtlasHook.sendHBaseColumnFamilyOperation(null, tableName, columnFamily, HBaseAtlasHook.OPERATION.DELETE_COLUMN_FAMILY);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessor.postDeleteColumn()");
- }
- }
-
@Override
public void postCreateNamespace(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postCreateNamespace()");
- }
+ LOG.info("==> HBaseAtlasCoprocessor.postCreateNamespace()");
- hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_NAMESPACE);
+ hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_NAMESPACE, observerContext);
if (LOG.isDebugEnabled()) {
LOG.debug("<== HBaseAtlasCoprocessor.postCreateNamespace()");
@@ -127,11 +87,9 @@ public class HBaseAtlasCoprocessor extends HBaseAtlasCoprocessorBase {
@Override
public void postDeleteNamespace(ObserverContext observerContext, String s) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
- }
+ LOG.info("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
- hbaseAtlasHook.sendHBaseNameSpaceOperation(null, s, HBaseAtlasHook.OPERATION.DELETE_NAMESPACE);
+ hbaseAtlasHook.sendHBaseNameSpaceOperation(null, s, HBaseAtlasHook.OPERATION.DELETE_NAMESPACE, observerContext);
if (LOG.isDebugEnabled()) {
LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
@@ -140,11 +98,9 @@ public class HBaseAtlasCoprocessor extends HBaseAtlasCoprocessorBase {
@Override
public void postModifyNamespace(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postModifyNamespace()");
- }
+ LOG.info("==> HBaseAtlasCoprocessor.postModifyNamespace()");
- hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.ALTER_NAMESPACE);
+ hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, HBaseAtlasHook.OPERATION.ALTER_NAMESPACE, observerContext);
if (LOG.isDebugEnabled()) {
LOG.debug("<== HBaseAtlasCoprocessor.postModifyNamespace()");
@@ -152,23 +108,22 @@ public class HBaseAtlasCoprocessor extends HBaseAtlasCoprocessorBase {
}
@Override
- public void postCloneSnapshot(ObserverContext observerContext, HBaseProtos.SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessoror.postCloneSnapshot()");
- }
- hbaseAtlasHook.sendHBaseTableOperation(hTableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE);
- if (LOG.isDebugEnabled()) {
- LOG.debug("<== HBaseAtlasCoprocessoror.postCloneSnapshot()");
- }
+ public void postCloneSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException {
+ LOG.info("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
+ hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, null, HBaseAtlasHook.OPERATION.CREATE_TABLE, observerContext);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
+ }
}
@Override
- public void postRestoreSnapshot(ObserverContext observerContext, HBaseProtos.SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
- }
- hbaseAtlasHook.sendHBaseTableOperation(hTableDescriptor, hTableDescriptor.getTableName(), HBaseAtlasHook.OPERATION.ALTER_TABLE);
+ public void postRestoreSnapshot(ObserverContext observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException {
+ LOG.info("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
+
+ hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, snapshot.getTableName(), HBaseAtlasHook.OPERATION.ALTER_TABLE, observerContext);
+
if (LOG.isDebugEnabled()) {
LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
}
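With HBase 2.x, the observer interfaces provide default no-op implementations, which is why the 991-line stub base class deleted in the next diff is no longer needed: a coprocessor implements only the interfaces it cares about and hands its observer to the framework. Below is a minimal sketch of that registration pattern; it is not part of the patch, the class name and logging body are illustrative, and the patch's HBaseAtlasCoprocessor wires the equivalent hooks to HBaseAtlasHook.

import java.io.IOException;
import java.util.Optional;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class MasterObserverSketch implements MasterCoprocessor, MasterObserver {
    // HBase 2.x asks the coprocessor for its observer; returning "this" is the
    // common pattern when one class plays both roles.
    @Override
    public Optional<MasterObserver> getMasterObserver() {
        return Optional.of(this);
    }

    // Only the hooks of interest are overridden; everything else falls back to
    // the default no-op implementations on MasterObserver.
    @Override
    public void postDeleteTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
                                TableName tableName) throws IOException {
        System.out.println("table deleted: " + tableName.getNameAsString());
    }
}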
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessorBase.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessorBase.java
deleted file mode 100644
index f4ca25a1a..000000000
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessorBase.java
+++ /dev/null
@@ -1,991 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.hbase.hook;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.NavigableSet;
-import java.util.Set;
-
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.hook.AtlasHook;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.coprocessor.*;
-import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-import org.apache.hadoop.hbase.io.Reference;
-import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.master.RegionPlan;
-import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.protobuf.generated.*;
-import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
-import org.apache.hadoop.hbase.regionserver.*;
-import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.wal.WALKey;
-
-
-/**
- * This class exists only to prevent the clutter of methods that we don't intend to implement in the main co-processor class.
- *
- */
-public abstract class HBaseAtlasCoprocessorBase implements MasterObserver,RegionObserver,RegionServerObserver,BulkLoadObserver {
-
- @Override
- public void preCreateTable(ObserverContext observerContext, HTableDescriptor hTableDescriptor, HRegionInfo[] hRegionInfos) throws IOException {
-
- }
-
- @Override
- public void preCreateTableHandler(ObserverContext observerContext, HTableDescriptor hTableDescriptor, HRegionInfo[] hRegionInfos) throws IOException {
-
- }
-
- @Override
- public void preDeleteTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preDeleteTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preTruncateTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preTruncateTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preModifyTable(ObserverContext observerContext, TableName tableName, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void preModifyTableHandler(ObserverContext observerContext, TableName tableName, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void preAddColumn(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void preAddColumnHandler(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void preModifyColumn(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void preModifyColumnHandler(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void preDeleteColumn(ObserverContext observerContext, TableName tableName, byte[] bytes) throws IOException {
-
- }
-
- @Override
- public void preDeleteColumnHandler(ObserverContext observerContext, TableName tableName, byte[] bytes) throws IOException {
-
- }
-
- @Override
- public void preEnableTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preEnableTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preDisableTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preDisableTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preMove(ObserverContext observerContext, HRegionInfo hRegionInfo, ServerName serverName, ServerName serverName1) throws IOException {
-
- }
-
-
- @Override
- public void preListProcedures(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preAssign(ObserverContext observerContext, HRegionInfo hRegionInfo) throws IOException {
-
- }
-
- @Override
- public void preUnassign(ObserverContext observerContext, HRegionInfo hRegionInfo, boolean b) throws IOException {
-
- }
-
- @Override
- public void preRegionOffline(ObserverContext observerContext, HRegionInfo hRegionInfo) throws IOException {
-
- }
-
- @Override
- public void preBalance(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public boolean preBalanceSwitch(ObserverContext observerContext, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public void preShutdown(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preStopMaster(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preMasterInitialization(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preSnapshot(ObserverContext observerContext, SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void preListSnapshot(ObserverContext observerContext, SnapshotDescription snapshotDescription) throws IOException {
-
- }
-
- @Override
- public void preCloneSnapshot(ObserverContext observerContext, SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void preRestoreSnapshot(ObserverContext observerContext, SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void preDeleteSnapshot(ObserverContext observerContext, SnapshotDescription snapshotDescription) throws IOException {
-
- }
-
- @Override
- public void preGetTableDescriptors(ObserverContext observerContext, List list, List list1) throws IOException {
-
- }
-
- @Override
- public void preGetTableDescriptors(ObserverContext observerContext, List list, List list1, String s) throws IOException {
-
- }
-
- @Override
- public void preGetTableNames(ObserverContext observerContext, List list, String s) throws IOException {
-
- }
-
- @Override
- public void preCreateNamespace(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-
- }
-
- @Override
- public void preDeleteNamespace(ObserverContext observerContext, String s) throws IOException {
-
- }
-
- @Override
- public void preModifyNamespace(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-
- }
-
- @Override
- public void preGetNamespaceDescriptor(ObserverContext observerContext, String s) throws IOException {
-
- }
-
- @Override
- public void preListNamespaceDescriptors(ObserverContext observerContext, List list) throws IOException {
-
- }
-
- @Override
- public void preTableFlush(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void preSetUserQuota(ObserverContext observerContext, String s, Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void preSetUserQuota(ObserverContext observerContext, String s, TableName tableName, Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void preSetUserQuota(ObserverContext observerContext, String s, String s1, Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void preSetTableQuota(ObserverContext observerContext, TableName tableName, Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void preSetNamespaceQuota(ObserverContext observerContext, String s, Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void start(CoprocessorEnvironment coprocessorEnvironment) throws IOException {
-
- }
-
- @Override
- public void stop(CoprocessorEnvironment coprocessorEnvironment) throws IOException {
-
- }
-
- @Override
- public void postGetTableDescriptors(ObserverContext observerContext, List list) throws IOException {
-
- }
-
- @Override
- public void postBalance(ObserverContext observerContext, List list) throws IOException {
-
- }
-
- @Override
- public void postBalanceSwitch(ObserverContext observerContext, boolean b, boolean b1) throws IOException {
-
- }
-
- @Override
- public void postGetNamespaceDescriptor(ObserverContext observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-
- }
-
- @Override
- public void postStartMaster(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postSnapshot(ObserverContext observerContext, SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void postSetNamespaceQuota(ObserverContext observerContext, String s, Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void postAbortProcedure(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postListProcedures(ObserverContext observerContext, List list) throws IOException {
-
- }
-
- @Override
- public void postCreateTableHandler(ObserverContext observerContext, HTableDescriptor hTableDescriptor, HRegionInfo[] hRegionInfos) throws IOException {
-
- }
-
- @Override
- public void postDeleteTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postTruncateTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postTruncateTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postModifyTableHandler(ObserverContext observerContext, TableName tableName, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void postAddColumnHandler(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void postModifyColumnHandler(ObserverContext observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void postDeleteColumnHandler(ObserverContext observerContext, TableName tableName, byte[] bytes) throws IOException {
-
- }
- @Override
- public void postEnableTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postEnableTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postDisableTable(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postDisableTableHandler(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postMove(ObserverContext observerContext, HRegionInfo hRegionInfo, ServerName serverName, ServerName serverName1) throws IOException {
-
- }
-
- @Override
- public void postAssign(ObserverContext observerContext, HRegionInfo hRegionInfo) throws IOException {
-
- }
-
- @Override
- public void postUnassign(ObserverContext observerContext, HRegionInfo hRegionInfo, boolean b) throws IOException {
-
- }
-
- @Override
- public void postRegionOffline(ObserverContext observerContext, HRegionInfo hRegionInfo) throws IOException {
-
- }
-
- @Override
- public void postListSnapshot(ObserverContext observerContext, HBaseProtos.SnapshotDescription snapshotDescription) throws IOException {
-
- }
-
- @Override
- public void postCloneSnapshot(ObserverContext observerContext, HBaseProtos.SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void postRestoreSnapshot(ObserverContext observerContext, HBaseProtos.SnapshotDescription snapshotDescription, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void postDeleteSnapshot(ObserverContext observerContext, HBaseProtos.SnapshotDescription snapshotDescription) throws IOException {
-
- }
-
- @Override
- public void postGetTableDescriptors(ObserverContext observerContext, List list, List list1, String s) throws IOException {
-
- }
-
- @Override
- public void postGetTableNames(ObserverContext observerContext, List list, String s) throws IOException {
-
- }
-
- @Override
- public void postListNamespaceDescriptors(ObserverContext observerContext, List list) throws IOException {
-
- }
-
- @Override
- public void postTableFlush(ObserverContext observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postSetUserQuota(ObserverContext observerContext, String s, QuotaProtos.Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void postSetUserQuota(ObserverContext observerContext, String s, TableName tableName, QuotaProtos.Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void postSetUserQuota(ObserverContext observerContext, String s, String s1, QuotaProtos.Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void postSetTableQuota(ObserverContext observerContext, TableName tableName, QuotaProtos.Quotas quotas) throws IOException {
-
- }
-
- @Override
- public void preOpen(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postOpen(ObserverContext observerContext) {
-
- }
-
- @Override
- public void postLogReplay(ObserverContext observerContext) {
-
- }
-
- @Override
- public InternalScanner preFlushScannerOpen(ObserverContext observerContext, Store store, KeyValueScanner keyValueScanner, InternalScanner internalScanner) throws IOException {
- return null;
- }
-
- @Override
- public void preFlush(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public InternalScanner preFlush(ObserverContext observerContext, Store store, InternalScanner internalScanner) throws IOException {
- return internalScanner;
- }
-
- @Override
- public void postFlush(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postFlush(ObserverContext observerContext, Store store, StoreFile storeFile) throws IOException {
-
- }
-
- @Override
- public void preCompactSelection(ObserverContext observerContext, Store store, List list, CompactionRequest compactionRequest) throws IOException {
-
- }
-
- @Override
- public void preCompactSelection(ObserverContext observerContext, Store store, List list) throws IOException {
-
- }
-
- @Override
- public void postCompactSelection(ObserverContext observerContext, Store store, ImmutableList immutableList, CompactionRequest compactionRequest) {
-
- }
-
- @Override
- public void postCompactSelection(ObserverContext observerContext, Store store, ImmutableList immutableList) {
-
- }
-
- @Override
- public InternalScanner preCompact(ObserverContext observerContext, Store store, InternalScanner internalScanner, ScanType scanType, CompactionRequest compactionRequest) throws IOException {
- return internalScanner;
- }
-
- @Override
- public InternalScanner preCompact(ObserverContext observerContext, Store store, InternalScanner internalScanner, ScanType scanType) throws IOException {
- return internalScanner;
- }
-
- @Override
- public InternalScanner preCompactScannerOpen(ObserverContext observerContext, Store store, List<? extends KeyValueScanner> list, ScanType scanType, long l, InternalScanner internalScanner, CompactionRequest compactionRequest) throws IOException {
- return internalScanner;
- }
-
- @Override
- public InternalScanner preCompactScannerOpen(ObserverContext observerContext, Store store, List<? extends KeyValueScanner> list, ScanType scanType, long l, InternalScanner internalScanner) throws IOException {
- return internalScanner;
- }
-
- @Override
- public void postCompact(ObserverContext observerContext, Store store, StoreFile storeFile, CompactionRequest compactionRequest) throws IOException {
-
- }
-
- @Override
- public void postCompact(ObserverContext observerContext, Store store, StoreFile storeFile) throws IOException {
-
- }
-
- @Override
- public void preSplit(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preSplit(ObserverContext observerContext, byte[] bytes) throws IOException {
-
- }
-
- @Override
- public void postSplit(ObserverContext observerContext, Region region, Region region1) throws IOException {
-
- }
-
- @Override
- public void preSplitBeforePONR(ObserverContext observerContext, byte[] bytes, List list) throws IOException {
-
- }
-
- @Override
- public void preSplitAfterPONR(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preRollBackSplit(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postRollBackSplit(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postCompleteSplit(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preClose(ObserverContext observerContext, boolean b) throws IOException {
-
- }
-
- @Override
- public void postClose(ObserverContext observerContext, boolean b) {
-
- }
-
- @Override
- public void preGetClosestRowBefore(ObserverContext observerContext, byte[] bytes, byte[] bytes1, Result result) throws IOException {
-
- }
-
- @Override
- public void postGetClosestRowBefore(ObserverContext observerContext, byte[] bytes, byte[] bytes1, Result result) throws IOException {
-
- }
-
- @Override
- public void preGetOp(ObserverContext observerContext, Get get, List list) throws IOException {
-
- }
-
- @Override
- public void postGetOp(ObserverContext observerContext, Get get, List<Cell> list) throws IOException {
-
- }
-
- @Override
- public boolean preExists(ObserverContext observerContext, Get get, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean postExists(ObserverContext observerContext, Get get, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public void prePut(ObserverContext observerContext, Put put, WALEdit walEdit, Durability durability) throws IOException {
-
- }
-
- @Override
- public void postPut(ObserverContext observerContext, Put put, WALEdit walEdit, Durability durability) throws IOException {
-
- }
-
- @Override
- public void preDelete(ObserverContext observerContext, Delete delete, WALEdit walEdit, Durability durability) throws IOException {
-
- }
-
- @Override
- public void prePrepareTimeStampForDeleteVersion(ObserverContext observerContext, Mutation mutation, Cell cell, byte[] bytes, Get get) throws IOException {
-
- }
-
- @Override
- public void postDelete(ObserverContext observerContext, Delete delete, WALEdit walEdit, Durability durability) throws IOException {
-
- }
-
- @Override
- public void preBatchMutate(ObserverContext observerContext, MiniBatchOperationInProgress miniBatchOperationInProgress) throws IOException {
-
- }
-
- @Override
- public void postBatchMutate(ObserverContext observerContext, MiniBatchOperationInProgress miniBatchOperationInProgress) throws IOException {
-
- }
-
- @Override
- public void postStartRegionOperation(ObserverContext observerContext, Region.Operation operation) throws IOException {
-
- }
-
- @Override
- public void postCloseRegionOperation(ObserverContext observerContext, Region.Operation operation) throws IOException {
-
- }
-
- @Override
- public void postBatchMutateIndispensably(ObserverContext observerContext, MiniBatchOperationInProgress miniBatchOperationInProgress, boolean b) throws IOException {
-
- }
-
- @Override
- public boolean preCheckAndPut(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, ByteArrayComparable byteArrayComparable, Put put, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean preCheckAndPutAfterRowLock(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, ByteArrayComparable byteArrayComparable, Put put, boolean b) throws IOException {
- return false;
- }
-
- @Override
- public boolean postCheckAndPut(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, ByteArrayComparable byteArrayComparable, Put put, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean preCheckAndDelete(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, ByteArrayComparable byteArrayComparable, Delete delete, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean preCheckAndDeleteAfterRowLock(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, ByteArrayComparable byteArrayComparable, Delete delete, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean postCheckAndDelete(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, ByteArrayComparable byteArrayComparable, Delete delete, boolean b) throws IOException {
- return false;
- }
-
- @Override
- public long preIncrementColumnValue(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, long l, boolean b) throws IOException {
- return l;
- }
-
- @Override
- public long postIncrementColumnValue(ObserverContext observerContext, byte[] bytes, byte[] bytes1, byte[] bytes2, long l, boolean b, long l1) throws IOException {
- return l;
- }
-
- @Override
- public Result preAppend(ObserverContext observerContext, Append append) throws IOException {
- return null;
- }
-
- @Override
- public Result preAppendAfterRowLock(ObserverContext observerContext, Append append) throws IOException {
- return null;
- }
-
- @Override
- public Result postAppend(ObserverContext observerContext, Append append, Result result) throws IOException {
- return result;
- }
-
- @Override
- public Result preIncrement(ObserverContext observerContext, Increment increment) throws IOException {
- return null;
- }
-
- @Override
- public Result preIncrementAfterRowLock(ObserverContext observerContext, Increment increment) throws IOException {
- return null;
- }
-
- @Override
- public Result postIncrement(ObserverContext observerContext, Increment increment, Result result) throws IOException {
- return result;
- }
-
- @Override
- public RegionScanner preScannerOpen(ObserverContext observerContext, Scan scan, RegionScanner regionScanner) throws IOException {
- return regionScanner;
- }
-
- @Override
- public KeyValueScanner preStoreScannerOpen(ObserverContext observerContext, Store store, Scan scan, NavigableSet navigableSet, KeyValueScanner keyValueScanner) throws IOException {
- return keyValueScanner;
- }
-
- @Override
- public RegionScanner postScannerOpen(ObserverContext observerContext, Scan scan, RegionScanner regionScanner) throws IOException {
- return regionScanner;
- }
-
- @Override
- public boolean preScannerNext(ObserverContext observerContext, InternalScanner internalScanner, List list, int i, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean postScannerNext(ObserverContext observerContext, InternalScanner internalScanner, List list, int i, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public boolean postScannerFilterRow(ObserverContext observerContext, InternalScanner internalScanner, byte[] bytes, int i, short i1, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public void preScannerClose(ObserverContext observerContext, InternalScanner internalScanner) throws IOException {
-
- }
-
- @Override
- public void postScannerClose(ObserverContext observerContext, InternalScanner internalScanner) throws IOException {
-
- }
-
- @Override
- public void preWALRestore(ObserverContext<? extends RegionCoprocessorEnvironment> observerContext, HRegionInfo hRegionInfo, WALKey walKey, WALEdit walEdit) throws IOException {
-
- }
-
- @Override
- public void preWALRestore(ObserverContext observerContext, HRegionInfo hRegionInfo, HLogKey hLogKey, WALEdit walEdit) throws IOException {
-
- }
-
- @Override
- public void postWALRestore(ObserverContext<? extends RegionCoprocessorEnvironment> observerContext, HRegionInfo hRegionInfo, WALKey walKey, WALEdit walEdit) throws IOException {
-
- }
-
- @Override
- public void postWALRestore(ObserverContext observerContext, HRegionInfo hRegionInfo, HLogKey hLogKey, WALEdit walEdit) throws IOException {
-
- }
-
- @Override
- public void preBulkLoadHFile(ObserverContext observerContext, List<Pair<byte[], String>> list) throws IOException {
-
- }
-
- @Override
- public boolean postBulkLoadHFile(ObserverContext observerContext, List<Pair<byte[], String>> list, boolean b) throws IOException {
- return b;
- }
-
- @Override
- public StoreFile.Reader preStoreFileReaderOpen(ObserverContext observerContext, FileSystem fileSystem, Path path, FSDataInputStreamWrapper fsDataInputStreamWrapper, long l, CacheConfig cacheConfig, Reference reference, StoreFile.Reader reader) throws IOException {
- return reader;
- }
-
- @Override
- public StoreFile.Reader postStoreFileReaderOpen(ObserverContext observerContext, FileSystem fileSystem, Path path, FSDataInputStreamWrapper fsDataInputStreamWrapper, long l, CacheConfig cacheConfig, Reference reference, StoreFile.Reader reader) throws IOException {
- return reader;
- }
-
- @Override
- public Cell postMutationBeforeWAL(ObserverContext observerContext, MutationType mutationType, Mutation mutation, Cell cell, Cell cell1) throws IOException {
- return cell;
- }
-
- @Override
- public DeleteTracker postInstantiateDeleteTracker(ObserverContext observerContext, DeleteTracker deleteTracker) throws IOException {
- return deleteTracker;
- }
-
- @Override
- public void preStopRegionServer(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void preMerge(ObserverContext observerContext, Region region, Region region1) throws IOException {
-
- }
-
- @Override
- public void postMerge(ObserverContext observerContext, Region region, Region region1, Region region2) throws IOException {
-
- }
-
- @Override
- public void preMergeCommit(ObserverContext observerContext, Region region, Region region1, @MetaMutationAnnotation List list) throws IOException {
-
- }
-
- @Override
- public void postMergeCommit(ObserverContext observerContext, Region region, Region region1, Region region2) throws IOException {
-
- }
-
- @Override
- public void preRollBackMerge(ObserverContext observerContext, Region region, Region region1) throws IOException {
-
- }
-
- @Override
- public void postRollBackMerge(ObserverContext observerContext, Region region, Region region1) throws IOException {
-
- }
-
- @Override
- public void preRollWALWriterRequest(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public void postRollWALWriterRequest(ObserverContext observerContext) throws IOException {
-
- }
-
- @Override
- public ReplicationEndpoint postCreateReplicationEndPoint(ObserverContext observerContext, ReplicationEndpoint replicationEndpoint) {
- return null;
- }
-
- @Override
- public void preReplicateLogEntries(ObserverContext observerContext, List list, CellScanner cellScanner) throws IOException {
-
- }
-
- @Override
- public void postReplicateLogEntries(ObserverContext observerContext, List list, CellScanner cellScanner) throws IOException {
-
- }
-
- @Override
- public void prePrepareBulkLoad(ObserverContext<RegionCoprocessorEnvironment> observerContext, SecureBulkLoadProtos.PrepareBulkLoadRequest prepareBulkLoadRequest) throws IOException {
-
- }
-
- @Override
- public void preCleanupBulkLoad(ObserverContext<RegionCoprocessorEnvironment> observerContext, SecureBulkLoadProtos.CleanupBulkLoadRequest cleanupBulkLoadRequest) throws IOException {
-
- }
-
- @Override
- public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> observerContext, HTableDescriptor hTableDescriptor, HRegionInfo[] hRegionInfos) throws IOException {
-
- }
-
-
-
- @Override
- public void postDeleteTable(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName) throws IOException {
-
- }
-
- @Override
- public void postModifyTable(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, HTableDescriptor hTableDescriptor) throws IOException {
-
- }
-
- @Override
- public void postAddColumn(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void postModifyColumn(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, HColumnDescriptor hColumnDescriptor) throws IOException {
-
- }
-
- @Override
- public void postDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> observerContext, TableName tableName, byte[] bytes) throws IOException {
-
- }
-
- @Override
- public void postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-
- }
-
- @Override
- public void postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> observerContext, String s) throws IOException {
-
- }
-
- @Override
- public void postModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-
- }
-
- @Override
- public void preAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> observerContext, ProcedureExecutor<MasterProcedureEnv> procedureExecutor, long l) throws IOException {
-
- }
-}
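Aside (not part of the patch): the long run of empty overrides removed above existed because the HBase 1.x observer interfaces had no default methods, so the coprocessor had to stub out every callback it did not care about. Below is a minimal sketch of the HBase 2.x coprocessor style that the rest of this change appears to move toward (the TableDescriptor/ColumnFamilyDescriptor imports added further down are 2.x client API): observer interfaces now ship default no-op implementations, so a coprocessor overrides only the hooks it needs. The class name ExampleTableAuditObserver and the println are illustrative assumptions, not taken from the Atlas sources.

```java
import java.io.IOException;
import java.util.Optional;

import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

// Illustrative only: a master-side coprocessor that overrides a single hook.
// Every other callback keeps the interface's default no-op implementation,
// which is why blanket stubs like the ones deleted above become unnecessary.
public class ExampleTableAuditObserver implements MasterCoprocessor, MasterObserver {

    @Override
    public Optional<MasterObserver> getMasterObserver() {
        return Optional.of(this); // expose this instance as the MasterObserver
    }

    @Override
    public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
                                TableDescriptor desc, RegionInfo[] regions) throws IOException {
        // react to table creation only
        System.out.println("table created: " + desc.getTableName().getNameAsString());
    }
}
```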
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
index bc8485b9f..1ef7c07de 100644
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
+++ b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
@@ -24,6 +24,8 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.security.UserGroupInformation;
import java.util.ArrayList;
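Aside (illustrative, not part of the patch): the imports added above replace the deprecated HTableDescriptor/HColumnDescriptor types with the HBase 2.x TableDescriptor and ColumnFamilyDescriptor interfaces used by the fields and constructor in the next hunk. A minimal sketch of how such descriptors are typically built with the 2.x builder API; the table name "example_table" and column family "cf" are assumed values for illustration.

```java
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class DescriptorBuilderExample {
    public static void main(String[] args) {
        // build an immutable TableDescriptor with a single column family "cf"
        TableDescriptor table = TableDescriptorBuilder
                .newBuilder(TableName.valueOf("example_table"))
                .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
                .build();

        // TableDescriptor exposes the same kind of metadata HTableDescriptor did
        ColumnFamilyDescriptor[] families = table.getColumnFamilies();
        System.out.println(table.getTableName() + " has " + families.length + " column family");
    }
}
```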
@@ -37,41 +39,41 @@ public class HBaseOperationContext {
private final HBaseAtlasHook.OPERATION operation;
private final String user;
private final NamespaceDescriptor namespaceDescriptor;
- private final HTableDescriptor hTableDescriptor;
- private final HColumnDescriptor[] hColumnDescriptors;
+ private final TableDescriptor tableDescriptor;
+ private final ColumnFamilyDescriptor[] columnFamilyDescriptors;
private final TableName tableName;
private final String nameSpace;
private final String columnFamily;
private final String owner;
- private final HColumnDescriptor hColumnDescriptor;
+ private final ColumnFamilyDescriptor columnFamilyDescriptor;
- public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, String nameSpace, HTableDescriptor hTableDescriptor, TableName tableName, HColumnDescriptor[] hColumnDescriptors,
- HColumnDescriptor hColumnDescriptor, String columnFamily, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi , String user, String owner,
- Map<String, String> hbaseConf) {
- this.namespaceDescriptor = namespaceDescriptor;
- this.nameSpace = nameSpace;
- this.hTableDescriptor = hTableDescriptor;
- this.tableName = tableName;
- this.hColumnDescriptors = hColumnDescriptors;
- this.hColumnDescriptor = hColumnDescriptor;
- this.columnFamily = columnFamily;
- this.operation = operation;
- this.ugi = ugi;
- this.user = user;
- this.owner = owner;
- this.hbaseConf = hbaseConf;
+ public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, String nameSpace, TableDescriptor tableDescriptor, TableName tableName, ColumnFamilyDescriptor[] columnFamilyDescriptors,
+ ColumnFamilyDescriptor columnFamilyDescriptor, String columnFamily, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi , String user, String owner,
+ Map<String, String> hbaseConf) {