ZOOKEEPER-3495: fix SnapshotDigestTest to work with JDK12+
The problem with the test `SnapshotDigestTest.testDifferentDigestVersion` was that it used reflection to change a static final value in `DigestCalculator`, which is no longer supported on JDK 12+ (see [JDK-8210522](https://bugs.openjdk.java.net/browse/JDK-8210522)).
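For context, the trick that breaks looks roughly like the sketch below. This is illustrative only, not the exact test code; it assumes the old `org.apache.zookeeper.server.util.DigestCalculator` with its `public static final Integer DIGEST_VERSION` field.

```java
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

import org.apache.zookeeper.server.util.DigestCalculator; // old location of the class

class FinalFieldHackSketch {

    // Roughly the reflection trick the old test relied on (illustrative only).
    static void forceDigestVersion(int newVersion) throws ReflectiveOperationException {
        Field version = DigestCalculator.class.getDeclaredField("DIGEST_VERSION");
        version.setAccessible(true);

        // JDK 12+ filters out the "modifiers" field of java.lang.reflect.Field
        // (JDK-8210522), so this lookup now throws NoSuchFieldException and the
        // whole approach stops working.
        Field modifiers = Field.class.getDeclaredField("modifiers");
        modifiers.setAccessible(true);
        modifiers.setInt(version, version.getModifiers() & ~Modifier.FINAL);

        version.set(null, newVersion);
    }
}
```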

There are still some hacky ways to work around this limitation (perhaps with [PowerMock](https://github.com/powermock/powermock), or using [VarHandle as suggested here](https://stackoverflow.com/questions/56039341/get-declared-fields-of-java-lang-reflect-fields-in-jdk12)), but I think it is best to avoid the situation where we need to poke a static final field at all.

I changed the fully static `DigestCalculator`, converting it to a singleton object with non-static methods and fields. The fields of the singleton object cannot be modified from production code, but we can still change them from the tests on JDK 12 / 13 (as they are no longer static final fields).
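A minimal sketch of what this enables for tests is shown below. The names follow the diff in this commit (the package-private `DataTree(DigestCalculator)` constructor and `getDigestVersion()` method); the actual test code may be wired differently.

```java
package org.apache.zookeeper.server;

// Sketch only: a test can now vary the digest version without touching any
// static final field, by handing DataTree its own DigestCalculator.
public class DigestVersionStubExample {

    static class OldVersionDigestCalculator extends DigestCalculator {
        @Override
        int getDigestVersion() {
            return 1; // simulate a snapshot written with an older digest version
        }
    }

    static DataTree newTreeWithOldDigestVersion() {
        // DataTree(DigestCalculator) is the new package-private constructor.
        return new DataTree(new OldVersionDigestCalculator());
    }
}
```

With a hook like this, a test such as `SnapshotDigestTest` can simulate an older digest version without any reflection.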

I tested it locally using OpenJDK; I hope it will also work in Jenkins.

Author: Mate Szalay-Beko <[email protected]>
Author: Mate Szalay-Beko <[email protected]>

Reviewers: [email protected], [email protected], [email protected]

Closes apache#1056 from symat/ZOOKEEPER-3495 and squashes the following commits:

fa2a3b3 [Mate Szalay-Beko] Merge remote-tracking branch 'apache/master' into HEAD
67c5532 [Mate Szalay-Beko] ZOOKEEPER-3495 move digestEnabled to the ZooKeeperServer class
c58a88c [Mate Szalay-Beko] ZOOKEEPER-3495: implement code-review comments
1a8cc2b [Mate Szalay-Beko] Merge remote-tracking branch 'origin/master' into ZOOKEEPER-3495
90da4c4 [Mate Szalay-Beko] ZOOKEEPER-3495: NodeHashMap use the same DigestCalculator instance as DataTree
e1f659c [Mate Szalay-Beko] Merge remote-tracking branch 'origin/master' into ZOOKEEPER-3495
dbbf38f [Mate Szalay-Beko] ZOOKEEPER-3495: fix checkstyle errors
07741ae [Mate Szalay-Beko] Merge remote-tracking branch 'origin/master' into ZOOKEEPER-3495
d0d886d [Mate Szalay-Beko] ZOOKEEPER-3495: change implementation according to code review comments
2a80c32 [Mate Szalay-Beko] ZOOKEEPER-3495: fix SnapshotDigestTest to work with JDK12+
symat authored and anmolnar committed Sep 10, 2019
1 parent 692ea8b commit f01d01c
Showing 8 changed files with 83 additions and 89 deletions.
DataTree.java
@@ -56,7 +56,6 @@
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.data.StatPersisted;
import org.apache.zookeeper.server.util.DigestCalculator;
import org.apache.zookeeper.server.watch.IWatchManager;
import org.apache.zookeeper.server.watch.WatchManagerFactory;
import org.apache.zookeeper.server.watch.WatcherOrBitSet;
@@ -96,7 +95,7 @@ public class DataTree {
* This map provides a fast lookup to the datanodes. The tree is the
* source of truth and is where all the locking occurs
*/
private final NodeHashMap nodes = new NodeHashMapImpl();
private final NodeHashMap nodes;

private IWatchManager dataWatches;

@@ -179,6 +178,8 @@ public class DataTree {
// The historical digests list.
private LinkedList<ZxidDigest> digestLog = new LinkedList<>();

private final DigestCalculator digestCalculator;

@SuppressWarnings("unchecked")
public Set<String> getEphemerals(long sessionId) {
HashSet<String> retv = ephemerals.get(sessionId);
@@ -269,6 +270,13 @@ public long cachedApproximateDataSize() {
private final DataNode quotaDataNode = new DataNode(new byte[0], -1L, new StatPersisted());

public DataTree() {
this(new DigestCalculator());
}

DataTree(DigestCalculator digestCalculator) {
this.digestCalculator = digestCalculator;
nodes = new NodeHashMapImpl(digestCalculator);

/* Rather than fight it, let root have an alias */
nodes.put("", root);
nodes.putWithoutDigest(rootZookeeper, root);
@@ -1594,7 +1602,7 @@ private void updateWriteStat(String path, long bytes) {
* Add the digest to the historical list, and update the latest zxid digest.
*/
private void logZxidDigest(long zxid, long digest) {
ZxidDigest zxidDigest = new ZxidDigest(zxid, DigestCalculator.DIGEST_VERSION, digest);
ZxidDigest zxidDigest = new ZxidDigest(zxid, digestCalculator.getDigestVersion(), digest);
lastProcessedZxidDigest = zxidDigest;
if (zxidDigest.zxid % DIGEST_LOG_INTERVAL == 0) {
synchronized (digestLog) {
@@ -1615,7 +1623,7 @@ private void logZxidDigest(long zxid, long digest) {
* @return true if the digest is serialized successfully
*/
public boolean serializeZxidDigest(OutputArchive oa) throws IOException {
if (!DigestCalculator.digestEnabled()) {
if (!ZooKeeperServer.isDigestEnabled()) {
return false;
}

@@ -1636,7 +1644,7 @@ public boolean serializeZxidDigest(OutputArchive oa) throws IOException {
* @return true if it deserialized successfully
*/
public boolean deserializeZxidDigest(InputArchive ia) throws IOException {
if (!DigestCalculator.digestEnabled()) {
if (!ZooKeeperServer.isDigestEnabled()) {
return false;
}

@@ -1661,9 +1669,9 @@
*/
public void compareSnapshotDigests(long zxid) {
if (zxid == digestFromLoadedSnapshot.zxid) {
if (DigestCalculator.DIGEST_VERSION != digestFromLoadedSnapshot.digestVersion) {
if (digestCalculator.getDigestVersion() != digestFromLoadedSnapshot.digestVersion) {
LOG.info("Digest version changed, local: {}, new: {}, "
+ "skip comparing digest now.", digestFromLoadedSnapshot.digestVersion, DigestCalculator.DIGEST_VERSION);
+ "skip comparing digest now.", digestFromLoadedSnapshot.digestVersion, digestCalculator.getDigestVersion());
digestFromLoadedSnapshot = null;
return;
}
@@ -1722,10 +1730,9 @@ public List<ZxidDigest> getDigestLog() {
}

/**
* A helper class to maintain the digest meta associated with specific
* zxid.
* A helper class to maintain the digest meta associated with specific zxid.
*/
public static class ZxidDigest {
public class ZxidDigest {

long zxid;
// the digest value associated with this zxid
Expand All @@ -1734,7 +1741,7 @@ public static class ZxidDigest {
int digestVersion;

ZxidDigest() {
this(0, DigestCalculator.DIGEST_VERSION, 0);
this(0, digestCalculator.getDigestVersion(), 0);
}

ZxidDigest(long zxid, int digestVersion, long digest) {
DigestCalculator.java
@@ -16,36 +16,26 @@
* limitations under the License.
*/

package org.apache.zookeeper.server.util;
package org.apache.zookeeper.server;

import java.nio.ByteBuffer;
import java.util.zip.CRC32;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.data.StatPersisted;
import org.apache.zookeeper.server.DataNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Defines how to calculate the digest for a given node.
*/
public class DigestCalculator {
class DigestCalculator {

private static final Logger LOG = LoggerFactory.getLogger(DigestCalculator.class);

// The hardcoded digest version, should bump up this version whenever
// we changed the digest method or fields.
//
// Defined it as Integer to make it able to be changed in test via reflection
public static final Integer DIGEST_VERSION = 2;
private static final int DIGEST_VERSION = 2;

public static final String ZOOKEEPER_DIGEST_ENABLED = "zookeeper.digest.enabled";
private static boolean digestEnabled;

static {
digestEnabled = Boolean.parseBoolean(System.getProperty(ZOOKEEPER_DIGEST_ENABLED, "true"));
LOG.info("{} = {}", ZOOKEEPER_DIGEST_ENABLED, digestEnabled);
}

/**
* Calculate the digest based on the given params.
Expand All @@ -68,9 +58,9 @@ public class DigestCalculator {
* @param stat the stat associated with the node
* @return the digest calculated from the given params
*/
public static long calculateDigest(String path, byte[] data, StatPersisted stat) {
long calculateDigest(String path, byte[] data, StatPersisted stat) {

if (!digestEnabled()) {
if (!ZooKeeperServer.isDigestEnabled()) {
return 0;
}

@@ -120,7 +110,7 @@ public static long calculateDigest(String path, byte[] data, StatPersisted stat)
/**
* Calculate the digest based on the given path and data node.
*/
public static long calculateDigest(String path, DataNode node) {
long calculateDigest(String path, DataNode node) {
if (!node.isDigestCached()) {
node.setDigest(calculateDigest(path, node.getData(), node.stat));
node.setDigestCached(true);
@@ -129,15 +119,10 @@ public static long calculateDigest(String path, DataNode node) {
}

/**
* Return true if the digest is enabled.
* Returns with the current digest version.
*/
public static boolean digestEnabled() {
return digestEnabled;
}

// Visible for test purpose
public static void setDigestEnabled(boolean enabled) {
digestEnabled = enabled;
int getDigestVersion() {
return DIGEST_VERSION;
}

}
NodeHashMapImpl.java
@@ -22,7 +22,6 @@
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.zookeeper.server.util.AdHash;
import org.apache.zookeeper.server.util.DigestCalculator;

/**
* a simple wrapper to ConcurrentHashMap that recalculates a digest after
@@ -33,6 +32,11 @@ public class NodeHashMapImpl implements NodeHashMap {
private final ConcurrentHashMap<String, DataNode> nodes = new ConcurrentHashMap<String, DataNode>();

private AdHash hash = new AdHash();
private final DigestCalculator digestCalculator;

public NodeHashMapImpl(DigestCalculator digestCalculator) {
this.digestCalculator = digestCalculator;
}

@Override
public DataNode put(String path, DataNode node) {
@@ -98,14 +102,14 @@ public void postChange(String path, DataNode node) {
}

private void addDigest(String path, DataNode node) {
if (DigestCalculator.digestEnabled()) {
hash.addDigest(DigestCalculator.calculateDigest(path, node));
if (ZooKeeperServer.isDigestEnabled()) {
hash.addDigest(digestCalculator.calculateDigest(path, node));
}
}

private void removeDigest(String path, DataNode node) {
if (DigestCalculator.digestEnabled()) {
hash.removeDigest(DigestCalculator.calculateDigest(path, node));
if (ZooKeeperServer.isDigestEnabled()) {
hash.removeDigest(digestCalculator.calculateDigest(path, node));
}
}

ZooKeeperServer.java
@@ -107,6 +107,9 @@ public class ZooKeeperServer implements SessionExpirer, ServerStats.Provider {
public static final String SESSION_REQUIRE_CLIENT_SASL_AUTH = "zookeeper.sessionRequireClientSASLAuth";
public static final String SASL_AUTH_SCHEME = "sasl";

public static final String ZOOKEEPER_DIGEST_ENABLED = "zookeeper.digest.enabled";
private static boolean digestEnabled;

static {
LOG = LoggerFactory.getLogger(ZooKeeperServer.class);

@@ -121,6 +124,9 @@ public class ZooKeeperServer implements SessionExpirer, ServerStats.Provider {
if (skipACL) {
LOG.info(SKIP_ACL + "==\"yes\", ACL checks will be skipped");
}

digestEnabled = Boolean.parseBoolean(System.getProperty(ZOOKEEPER_DIGEST_ENABLED, "true"));
LOG.info("{} = {}", ZOOKEEPER_DIGEST_ENABLED, digestEnabled);
}

protected ZooKeeperServerBean jmxServerBean;
@@ -1743,6 +1749,15 @@ public void checkACL(ServerCnxn cnxn, List<ACL> acl, int perm, List<Id> ids, Str
throw new KeeperException.NoAuthException();
}

public static boolean isDigestEnabled() {
return digestEnabled;
}

public static void setDigestEnabled(boolean digestEnabled) {
LOG.info("{} = {}", ZOOKEEPER_DIGEST_ENABLED, digestEnabled);
ZooKeeperServer.digestEnabled = digestEnabled;
}

/**
* Trim a path to get the immediate predecessor.
*
DataTreeTest.java
@@ -46,11 +46,8 @@
import org.apache.zookeeper.common.PathTrie;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.metrics.MetricsUtils;
import org.apache.zookeeper.server.util.DigestCalculator;
import org.apache.zookeeper.txn.CreateTxn;
import org.apache.zookeeper.txn.TxnHeader;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -59,18 +56,6 @@ public class DataTreeTest extends ZKTestCase {

protected static final Logger LOG = LoggerFactory.getLogger(DataTreeTest.class);

private DataTree dt;

@Before
public void setUp() throws Exception {
dt = new DataTree();
}

@After
public void tearDown() throws Exception {
dt = null;
}

/**
* For ZOOKEEPER-1755 - Test race condition when taking dumpEphemerals and
* removing the session related ephemerals from DataTree structure
@@ -133,6 +118,7 @@ public void process(WatchedEvent event) {
}
MyWatcher watcher = new MyWatcher();
// set a watch on the root node
DataTree dt = new DataTree();
dt.getChildren("/", new Stat(), watcher);
// add a new node, should trigger a watch
dt.createNode("/xyz", new byte[0], null, 0, dt.getNode("/").stat.getCversion() + 1, 1, 1);
@@ -145,7 +131,8 @@ public void process(WatchedEvent event) {
@Test(timeout = 60000)
public void testIncrementCversion() throws Exception {
try {
DigestCalculator.setDigestEnabled(true);
// digestCalculator gets initialized for the new DataTree constructor based on the system property
ZooKeeperServer.setDigestEnabled(true);
DataTree dt = new DataTree();
dt.createNode("/test", new byte[0], null, 0, dt.getNode("/").stat.getCversion() + 1, 1, 1);
DataNode zk = dt.getNode("/test");
@@ -166,12 +153,13 @@ public void testIncrementCversion() throws Exception {
+ ">", (newCversion == prevCversion + 1 && newPzxid == prevPzxid + 1));
assertNotEquals(digestBefore, dt.getTreeDigest());
} finally {
DigestCalculator.setDigestEnabled(false);
ZooKeeperServer.setDigestEnabled(false);
}
}

@Test
public void testNoCversionRevert() throws Exception {
DataTree dt = new DataTree();
DataNode parent = dt.getNode("/");
dt.createNode("/test", new byte[0], null, 0, parent.stat.getCversion() + 1, 1, 1);
int currentCversion = parent.stat.getCversion();
@@ -193,6 +181,7 @@ public void testNoCversionRevert() throws Exception {

@Test
public void testPzxidUpdatedWhenDeletingNonExistNode() throws Exception {
DataTree dt = new DataTree();
DataNode root = dt.getNode("/");
long currentPzxid = root.stat.getPzxid();

@@ -219,7 +208,10 @@ public void testPzxidUpdatedWhenDeletingNonExistNode() throws Exception {
@Test
public void testDigestUpdatedWhenReplayCreateTxnForExistNode() {
try {
DigestCalculator.setDigestEnabled(true);
// digestCalculator gets initialized for the new DataTree constructor based on the system property
ZooKeeperServer.setDigestEnabled(true);
DataTree dt = new DataTree();

dt.processTxn(new TxnHeader(13, 1000, 1, 30, ZooDefs.OpCode.create), new CreateTxn("/foo", "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, false, 1));

// create the same node with a higher cversion to simulate the
@@ -230,7 +222,7 @@ public void testDigestUpdatedWhenReplayCreateTxnForExistNode() {
// check the current digest value
assertEquals(dt.getTreeDigest(), dt.getLastProcessedZxidDigest().digest);
} finally {
DigestCalculator.setDigestEnabled(false);
ZooKeeperServer.setDigestEnabled(false);
}
}

@@ -456,7 +448,7 @@ public void testDataTreeMetrics() throws Exception {
public void testDigest() throws Exception {
try {
// enable digest check
DigestCalculator.setDigestEnabled(true);
ZooKeeperServer.setDigestEnabled(true);

DataTree dt = new DataTree();

@@ -487,7 +479,7 @@ public void testDigest() throws Exception {
dt.deleteNode("/digesttest/1", 5);
assertNotEquals(dt.getTreeDigest(), previousDigest);
} finally {
DigestCalculator.setDigestEnabled(false);
ZooKeeperServer.setDigestEnabled(false);
}
}

NodeHashMapImplTest.java
@@ -24,7 +24,6 @@
import java.util.Set;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.data.StatPersisted;
import org.apache.zookeeper.server.util.DigestCalculator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -33,20 +32,20 @@ public class NodeHashMapImplTest extends ZKTestCase {

@Before
public void setUp() {
DigestCalculator.setDigestEnabled(true);
ZooKeeperServer.setDigestEnabled(true);
}

@After
public void tearDown() {
DigestCalculator.setDigestEnabled(false);
ZooKeeperServer.setDigestEnabled(false);
}

/**
* Test all the operations supported in NodeHashMapImpl.
*/
@Test
public void testOperations() {
NodeHashMapImpl nodes = new NodeHashMapImpl();
NodeHashMapImpl nodes = new NodeHashMapImpl(new DigestCalculator());

assertEquals(0, nodes.size());
assertEquals(0L, nodes.getDigest());