Skip to content

Commit

Permalink
Remove keys methods in favor of keySet() on immutable maps (elastic#84650)
Browse files Browse the repository at this point in the history

This commit removes the older keys() and keysIt() methods on immutable
maps. Instead, keySet() is used directly.
  • Loading branch information
rjernst authored Mar 4, 2022
1 parent 3c987bc commit 42872d3
Show file tree
Hide file tree
Showing 32 changed files with 82 additions and 111 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,8 @@ public void testAllowed() {
request.addAliasAction(IndicesAliasesRequest.AliasActions.add().index("index").alias("alias"));
assertAcked(client().admin().indices().aliases(request).actionGet());
final GetAliasesResponse response = client().admin().indices().getAliases(new GetAliasesRequest("alias")).actionGet();
assertThat(response.getAliases().keys().size(), equalTo(1));
assertThat(response.getAliases().keys().iterator().next().value, equalTo("index"));
assertThat(response.getAliases().keySet().size(), equalTo(1));
assertThat(response.getAliases().keySet().iterator().next(), equalTo("index"));
final List<AliasMetadata> aliasMetadata = response.getAliases().get("index");
assertThat(aliasMetadata, hasSize(1));
assertThat(aliasMetadata.get(0).alias(), equalTo("alias"));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ private ClusterState.Builder randomNodes(ClusterState clusterState) {
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes());
List<String> nodeIds = randomSubsetOf(
randomInt(clusterState.nodes().getNodes().size() - 1),
clusterState.nodes().getNodes().keys().toArray(String.class)
clusterState.nodes().getNodes().keySet().toArray(new String[0])
);
for (String nodeId : nodeIds) {
if (nodeId.startsWith("node-")) {
Expand All @@ -241,7 +241,7 @@ private ClusterState.Builder randomRoutingTable(ClusterState clusterState) {
if (numberOfIndices > 0) {
List<String> randomIndices = randomSubsetOf(
randomInt(numberOfIndices - 1),
clusterState.routingTable().indicesRouting().keys().toArray(String.class)
clusterState.routingTable().indicesRouting().keySet().toArray(new String[0])
);
for (String index : randomIndices) {
if (randomBoolean()) {
Expand All @@ -250,15 +250,15 @@ private ClusterState.Builder randomRoutingTable(ClusterState clusterState) {
builder.add(
randomChangeToIndexRoutingTable(
clusterState.routingTable().indicesRouting().get(index),
clusterState.nodes().getNodes().keys().toArray(String.class)
clusterState.nodes().getNodes().keySet().toArray(new String[0])
)
);
}
}
}
int additionalIndexCount = randomIntBetween(1, 20);
for (int i = 0; i < additionalIndexCount; i++) {
builder.add(randomIndexRoutingTable("index-" + randomInt(), clusterState.nodes().getNodes().keys().toArray(String.class)));
builder.add(randomIndexRoutingTable("index-" + randomInt(), clusterState.nodes().getNodes().keySet().toArray(new String[0])));
}
return ClusterState.builder(clusterState).routingTable(builder.build());
}
Expand Down Expand Up @@ -403,7 +403,7 @@ private <T> ClusterState randomClusterStateParts(ClusterState clusterState, Stri
if (partCount > 0) {
List<String> randomParts = randomSubsetOf(
randomInt(partCount - 1),
randomPart.parts(clusterState).keys().toArray(String.class)
randomPart.parts(clusterState).keySet().toArray(new String[0])
);
for (String part : randomParts) {
if (randomBoolean()) {
Expand Down Expand Up @@ -502,7 +502,7 @@ private <T> Metadata randomParts(Metadata metadata, String prefix, RandomPart<T>
ImmutableOpenMap<String, T> parts = randomPart.parts(metadata);
int partCount = parts.size();
if (partCount > 0) {
List<String> randomParts = randomSubsetOf(randomInt(partCount - 1), randomPart.parts(metadata).keys().toArray(String.class));
List<String> randomParts = randomSubsetOf(randomInt(partCount - 1), randomPart.parts(metadata).keySet().toArray(new String[0]));
for (String part : randomParts) {
if (randomBoolean()) {
randomPart.remove(builder, part);
Expand Down Expand Up @@ -564,7 +564,7 @@ public IndexMetadata randomChange(IndexMetadata part) {
break;
case 1:
if (randomBoolean() && part.getAliases().isEmpty() == false) {
builder.removeAlias(randomFrom(part.getAliases().keys().toArray(String.class)));
builder.removeAlias(randomFrom(part.getAliases().keySet().toArray(new String[0])));
} else {
builder.putAlias(AliasMetadata.builder(randomAlphaOfLength(10)));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ public void beforeIndexCreated(Index index, Settings indexSettings) {
} catch (Exception e) {
assertTrue(e.getMessage().contains("failing on purpose"));
ClusterStateResponse resp = client().admin().cluster().prepareState().get();
assertFalse(resp.getState().routingTable().indicesRouting().keys().contains("failed"));
assertFalse(resp.getState().routingTable().indicesRouting().keySet().contains("failed"));
}
}

Expand Down Expand Up @@ -155,7 +155,7 @@ public void testIndexStateShardChanged() throws Throwable {
} catch (ElasticsearchException e) {
assertTrue(e.getMessage().contains("failing on purpose"));
ClusterStateResponse resp = client().admin().cluster().prepareState().get();
assertFalse(resp.getState().routingTable().indicesRouting().keys().contains("failed"));
assertFalse(resp.getState().routingTable().indicesRouting().keySet().contains("failed"));
}

// create an index
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,7 @@ static void checkNoDuplicatedAliasInIndexTemplate(
Locale.ROOT,
"Rollover alias [%s] can point to multiple indices, found duplicated alias [%s] in index template [%s]",
rolloverRequestAlias,
template.aliases().keys(),
template.aliases().keySet(),
template.name()
)
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ protected void processNodePaths(Terminal terminal, Path[] dataPaths, OptionSet o
terminal.println(Terminal.Verbosity.VERBOSE, "Loading cluster state");
final Tuple<Long, ClusterState> termAndClusterState = loadTermAndClusterState(persistedClusterStateService, env);
final ClusterState oldClusterState = termAndClusterState.v2();
terminal.println(Terminal.Verbosity.VERBOSE, "custom metadata names: " + oldClusterState.metadata().customs().keys());
terminal.println(Terminal.Verbosity.VERBOSE, "custom metadata names: " + oldClusterState.metadata().customs().keySet());
final Metadata.Builder metadataBuilder = Metadata.builder(oldClusterState.metadata());
for (String customToRemove : customsToRemove) {
boolean matched = false;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ public ConcreteIndex(IndexMetadata indexMetadata, DataStream dataStream) {
this.concreteIndexName = indexMetadata.getIndex();
this.isHidden = indexMetadata.isHidden();
this.isSystem = indexMetadata.isSystem();
this.aliases = indexMetadata.getAliases() != null ? List.of(indexMetadata.getAliases().keys().toArray(String.class)) : null;
this.aliases = indexMetadata.getAliases() != null ? indexMetadata.getAliases().keySet().stream().toList() : null;
this.dataStream = dataStream;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@

package org.elasticsearch.cluster.metadata;

import com.carrotsearch.hppc.ObjectHashSet;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
Expand All @@ -27,7 +25,6 @@
import org.elasticsearch.cluster.metadata.IndexAbstraction.ConcreteIndex;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
Expand Down Expand Up @@ -76,6 +73,7 @@
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
Expand Down Expand Up @@ -473,13 +471,13 @@ public ImmutableOpenMap<String, MappingMetadata> findMappings(
}

ImmutableOpenMap.Builder<String, MappingMetadata> indexMapBuilder = ImmutableOpenMap.builder();
Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
for (String index : intersection) {
Set<String> indicesKeys = indices.keySet();
Stream.of(concreteIndices).filter(indicesKeys::contains).forEach(index -> {
onNextIndex.run();
IndexMetadata indexMetadata = indices.get(index);
Predicate<String> fieldPredicate = fieldFilter.apply(index);
indexMapBuilder.put(index, filterFields(indexMetadata.mapping(), fieldPredicate));
}
});
return indexMapBuilder.build();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -169,9 +169,9 @@ public ImmutableOpenMap<String, DiscoveryNode> getMasterAndDataNodes() {
*/
public ImmutableOpenMap<String, DiscoveryNode> getCoordinatingOnlyNodes() {
ImmutableOpenMap.Builder<String, DiscoveryNode> nodes = ImmutableOpenMap.builder(this.nodes);
nodes.removeAll(masterNodes.keys());
nodes.removeAll(dataNodes.keys());
nodes.removeAll(ingestNodes.keys());
nodes.removeAllFromCollection(masterNodes.keySet());
nodes.removeAllFromCollection(dataNodes.keySet());
nodes.removeAllFromCollection(ingestNodes.keySet());
return nodes.build();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@

package org.elasticsearch.cluster.routing;

import com.carrotsearch.hppc.cursors.ObjectCursor;

import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.Assertions;
Expand Down Expand Up @@ -107,8 +105,8 @@ private RoutingNodes(RoutingTable routingTable, DiscoveryNodes discoveryNodes, b
discoveryNodes.getDataNodes().size()
);
// fill in the nodeToShards with the "live" nodes
for (ObjectCursor<String> node : discoveryNodes.getDataNodes().keys()) {
nodesToShards.put(node.value, new LinkedHashMap<>()); // LinkedHashMap to preserve order
for (var node : discoveryNodes.getDataNodes().keySet()) {
nodesToShards.put(node, new LinkedHashMap<>()); // LinkedHashMap to preserve order
}

// fill in the inverse of node -> shards allocated
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,8 +174,7 @@ public boolean validate(Metadata metadata) {
*/
public List<ShardRouting> allShards() {
List<ShardRouting> shards = new ArrayList<>();
String[] indices = indicesRouting.keys().toArray(String.class);
for (String index : indices) {
for (String index : indicesRouting.keySet()) {
List<ShardRouting> allShardsIndex = allShards(index);
shards.addAll(allShardsIndex);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,8 +143,7 @@ public void onNewInfo(ClusterInfo info) {
final long currentTimeMillis = currentTimeMillisSupplier.getAsLong();

// Clean up nodes that have been removed from the cluster
final Set<String> nodes = new HashSet<>(usages.size());
usages.keys().iterator().forEachRemaining(item -> nodes.add(item.value));
final Set<String> nodes = new HashSet<>(usages.keySet());
cleanUpRemovedNodes(nodes, nodesOverLowThreshold);
cleanUpRemovedNodes(nodes, nodesOverHighThreshold);
cleanUpRemovedNodes(nodes, nodesOverHighThresholdAndRelocating);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -619,7 +619,7 @@ private void balanceByWeights() {
* to the nodes we relocated them from.
*/
private String[] buildWeightOrderedIndices() {
final String[] indices = allocation.routingTable().indicesRouting().keys().toArray(String.class);
final String[] indices = allocation.routingTable().indicesRouting().keySet().toArray(new String[0]);
final float[] deltas = new float[indices.length];
for (int i = 0; i < deltas.length; i++) {
sorter.reset(indices[i]);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

import com.carrotsearch.hppc.IntCollection;
import com.carrotsearch.hppc.IntContainer;
import com.carrotsearch.hppc.IntLookupContainer;
import com.carrotsearch.hppc.IntObjectAssociativeContainer;
import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.IntObjectMap;
Expand Down Expand Up @@ -161,14 +160,6 @@ public Iterator<IntObjectCursor<VType>> iterator() {
return map.iterator();
}

/**
* Returns a specialized view of the keys of this associated container.
* The view additionally implements {@link com.carrotsearch.hppc.ObjectLookupContainer}.
*/
public IntLookupContainer keys() {
return map.keys();
}

/**
* Returns a direct iterator over the keys.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

import com.carrotsearch.hppc.ObjectCollection;
import com.carrotsearch.hppc.ObjectContainer;
import com.carrotsearch.hppc.ObjectLookupContainer;
import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.hppc.ObjectObjectMap;
Expand Down Expand Up @@ -255,37 +254,6 @@ public void forEach(Consumer<? super Map.Entry<KType, VType>> action) {
}
}

/**
* Returns a specialized view of the keys of this associated container.
* The view additionally implements {@link ObjectLookupContainer}.
*/
public ObjectLookupContainer<KType> keys() {
return map.keys();
}

/**
* Returns a direct iterator over the keys.
*/
public Iterator<KType> keysIt() {
final Iterator<ObjectCursor<KType>> iterator = map.keys().iterator();
return new Iterator<KType>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}

@Override
public KType next() {
return iterator.next().value;
}

@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}

/**
* Returns a {@link Set} view of the keys contained in this map.
*/
Expand All @@ -294,7 +262,23 @@ public Set<KType> keySet() {
return new AbstractSet<>() {
@Override
public Iterator<KType> iterator() {
return keysIt();
final Iterator<ObjectCursor<KType>> iterator = map.keys().iterator();
return new Iterator<KType>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}

@Override
public KType next() {
return iterator.next().value;
}

@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}

@Override
Expand Down Expand Up @@ -519,6 +503,12 @@ public int removeAll(ObjectPredicate<? super KType> predicate) {
return map.removeAll(predicate);
}

/**
 * Removes every key contained in the given collection from this builder's
 * underlying map. Keys not present in the map are ignored.
 *
 * @param collection the keys to remove; must not be {@code null}
 */
public void removeAllFromCollection(Collection<KType> collection) {
    collection.forEach(map::remove);
}

@Override
public <T extends ObjectObjectProcedure<? super KType, ? super VType>> T forEach(T procedure) {
return map.forEach(procedure);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ private IndexMetadata stripAliases(IndexMetadata indexMetadata) {
logger.info(
"[{}] stripping aliases: {} from index before importing",
indexMetadata.getIndex(),
indexMetadata.getAliases().keys()
indexMetadata.getAliases().keySet()
);
return IndexMetadata.builder(indexMetadata).removeAllAliases().build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -292,11 +292,11 @@ private boolean invariant() {
assert failedSnapshotShards.contains(shard) == false : "cannot be known and failed at same time: " + shard;
}
for (SnapshotShard shard : unknownSnapshotShards) {
assert knownSnapshotShards.keys().contains(shard) == false : "cannot be unknown and known at same time: " + shard;
assert knownSnapshotShards.keySet().contains(shard) == false : "cannot be unknown and known at same time: " + shard;
assert failedSnapshotShards.contains(shard) == false : "cannot be unknown and failed at same time: " + shard;
}
for (SnapshotShard shard : failedSnapshotShards) {
assert knownSnapshotShards.keys().contains(shard) == false : "cannot be failed and known at same time: " + shard;
assert knownSnapshotShards.keySet().contains(shard) == false : "cannot be failed and known at same time: " + shard;
assert unknownSnapshotShards.contains(shard) == false : "cannot be failed and unknown at same time: " + shard;
}
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ protected Writeable.Reader<GetMappingsResponse> instanceReader() {

private static GetMappingsResponse mutate(GetMappingsResponse original) {
ImmutableOpenMap.Builder<String, MappingMetadata> builder = ImmutableOpenMap.builder(original.mappings());
String indexKey = original.mappings().keys().iterator().next().value;
String indexKey = original.mappings().keySet().iterator().next();
builder.put(indexKey + "1", createMappingsForIndex());
return new GetMappingsResponse(builder.build());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ public void testNodesSelectors() {
nodeSelectors.add(randomFrom(NodeSelector.values()).selector);
}
int numNodeIds = randomIntBetween(0, 3);
String[] nodeIds = clusterService.state().nodes().getNodes().keys().toArray(String.class);
String[] nodeIds = clusterService.state().nodes().getNodes().keySet().toArray(new String[0]);
for (int i = 0; i < numNodeIds; i++) {
String nodeId = randomFrom(nodeIds);
nodeSelectors.add(nodeId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1128,7 +1128,7 @@ public void testBuildIndexMetadata() {
IndexMetadata indexMetadata = buildIndexMetadata("test", aliases, () -> null, indexSettings, 4, sourceIndexMetadata, false);

assertThat(indexMetadata.getAliases().size(), is(1));
assertThat(indexMetadata.getAliases().keys().iterator().next().value, is("alias1"));
assertThat(indexMetadata.getAliases().keySet().iterator().next(), is("alias1"));
assertThat("The source index primary term must be used", indexMetadata.primaryTerm(0), is(3L));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ public void testResolveNodesIds() {
}
}
int numNodeIds = randomIntBetween(0, 3);
String[] nodeIds = discoveryNodes.getNodes().keys().toArray(String.class);
String[] nodeIds = discoveryNodes.getNodes().keySet().toArray(new String[0]);
for (int i = 0; i < numNodeIds; i++) {
String nodeId = randomFrom(nodeIds);
nodeSelectors.add(nodeId);
Expand Down
Loading

0 comments on commit 42872d3

Please sign in to comment.