
Commit a79c14b
Use Multimap for VertexStore's vertexChildren
LukasGasior1 committed Mar 6, 2024
1 parent 783d36f commit a79c14b
Showing 2 changed files with 18 additions and 28 deletions.
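
For context on the data-structure swap: the old code kept a Map<HashCode, Set<HashCode>>, which meant registering an empty HashSet per vertex and looking that set up before adding each child, while a Guava Multimap (here a HashMultimap, which keeps one set of values per key) folds both steps into put(parent, child) and never returns null from get(). Below is a minimal sketch of that difference, assuming Guava on the classpath; the String keys and the class name are illustrative stand-ins, not part of the commit.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public final class VertexChildrenSketch {
  public static void main(String[] args) {
    // Before: each vertex needs an empty set registered up front, and adding a
    // child is a two-step lookup-then-add against that set.
    Map<String, Set<String>> asMap = new HashMap<>();
    asMap.put("parent", new HashSet<>());
    asMap.get("parent").add("child");

    // After: the multimap keeps one value collection per key internally; put()
    // creates it on demand and get() on an unknown key yields an empty view, never null.
    Multimap<String, String> asMultimap = HashMultimap.create();
    asMultimap.put("parent", "child");

    System.out.println(asMap.get("parent"));            // [child]
    System.out.println(asMultimap.get("parent"));       // [child]
    System.out.println(asMultimap.get("no-such-key"));  // [] (empty, not null)
  }
}

Every vertexChildren call site in the diff below is one of these operations (put, get, containsKey, remove, removeAll, clear).
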
core/src/main/java/com/radixdlt/RadixNodeModule.java (4 changes: 2 additions & 2 deletions)
@@ -167,8 +167,8 @@ protected void configure() {
     Preconditions.checkArgument(
         vertexStoreConfig.maxSerializedSizeBytes()
            >= VertexStoreConfig.MIN_MAX_SERIALIZED_SIZE_BYTES,
-        "Invalid configuration: bft.vertex_store.max_serialized_size_byte must be at least "
-            + VertexStoreConfig.MIN_MAX_SERIALIZED_SIZE_BYTES);
+        "Invalid configuration: bft.vertex_store.max_serialized_size_byte must be at least {}",
+        VertexStoreConfig.MIN_MAX_SERIALIZED_SIZE_BYTES);
 
     // System (e.g. time, random)
     install(new SystemModule());
core/src/main/java/com/radixdlt/consensus/vertexstore/VertexStoreJavaImpl.java (42 changes: 16 additions & 26 deletions)
@@ -65,7 +65,9 @@
 package com.radixdlt.consensus.vertexstore;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.HashMultimap;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Multimap;
 import com.google.common.hash.HashCode;
 import com.radixdlt.consensus.BFTHeader;
 import com.radixdlt.consensus.HighQC;
@@ -99,7 +101,7 @@ public final class VertexStoreJavaImpl implements VertexStore {
   private final VertexStoreConfig config;
 
   private final Map<HashCode, VertexWithHash> vertices = new HashMap<>();
-  private final Map<HashCode, Set<HashCode>> vertexChildren = new HashMap<>();
+  private final Multimap<HashCode, HashCode> vertexChildren = HashMultimap.create();
   private final Map<HashCode, ExecutedVertex> executedVertices = new HashMap<>();
 
   // These should never be null
@@ -133,13 +135,10 @@ private void resetToState(VertexStoreState state, WrappedByteArray serializedState) {
     this.vertices.clear();
     this.executedVertices.clear();
     this.vertexChildren.clear();
-    this.vertexChildren.put(rootVertex.hash(), new HashSet<>());
 
     for (var vertexWithHash : state.getVertices()) {
       this.vertices.put(vertexWithHash.hash(), vertexWithHash);
-      this.vertexChildren.put(vertexWithHash.hash(), new HashSet<>());
-      var siblings = this.vertexChildren.get(vertexWithHash.vertex().getParentVertexId());
-      siblings.add(vertexWithHash.hash());
+      this.vertexChildren.put(vertexWithHash.vertex().getParentVertexId(), vertexWithHash.hash());
     }
 
     trackCurrentStateSize(serializedState);
@@ -190,7 +189,7 @@ public InsertQcResult insertQc(QuorumCertificate qc) {
       return new VertexStore.InsertQcResult.VertexIsMissing();
     }
 
-    final var hasAnyChildren = !vertexChildren.get(qc.getProposedHeader().getVertexId()).isEmpty();
+    final var hasAnyChildren = vertexChildren.containsKey(qc.getProposedHeader().getVertexId());
    if (hasAnyChildren) {
       // TODO: Check to see if qc's match in case there's a fault
       return new VertexStore.InsertQcResult.Ignored();
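
As an aside, not part of the diff: on a HashMultimap, get() for an unmapped key yields an empty collection rather than null, and containsKey() is true only while at least one value is mapped, so the new containsKey check answers the same question as the old !get(...).isEmpty(). A quick illustration, again with String ids standing in for the vertex hashes and an illustrative class name:

import com.google.common.collect.HashMultimap;

public final class ContainsKeySketch {
  public static void main(String[] args) {
    var vertexChildren = HashMultimap.<String, String>create();

    // No children recorded yet: both formulations say "no".
    System.out.println(vertexChildren.containsKey("v1"));     // false
    System.out.println(!vertexChildren.get("v1").isEmpty());  // false

    // One child recorded: both formulations say "yes".
    vertexChildren.put("v1", "childOfV1");
    System.out.println(vertexChildren.containsKey("v1"));     // true
    System.out.println(!vertexChildren.get("v1").isEmpty());  // true
  }
}
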
@@ -393,17 +392,16 @@ private Result<BFTInsertUpdate, VertexInsertError> insertVertexInternal(
     // The vertex was executed successfully, so we're inserting it
     vertices.put(executedVertex.getVertexHash(), executedVertex.getVertexWithHash());
     executedVertices.put(executedVertex.getVertexHash(), executedVertex);
-    vertexChildren.put(executedVertex.getVertexHash(), new HashSet<>());
-    Set<HashCode> siblings = vertexChildren.get(executedVertex.getParentId());
-    siblings.add(executedVertex.getVertexHash());
+    vertexChildren.put(executedVertex.getParentId(), executedVertex.getVertexHash());
 
     // We've already calculated the post-insert state (and verified
     // its size against the limit), so we can just use it here.
     trackCurrentStateSize(postInsertSerializedState);
 
     // Update the metrics
     metrics.bft().vertexStore().vertexCount().set(vertices.size());
-    if (siblings.size() > 1) {
+    final var vertexAndSiblings = vertexChildren.get(executedVertex.getParentId());
+    if (vertexAndSiblings.size() > 1) {
       metrics.bft().vertexStore().forks().inc();
     }
     if (!vertexWithHash.vertex().hasDirectParent()) {
@@ -417,23 +415,20 @@ private Result<BFTInsertUpdate, VertexInsertError> insertVertexInternal(
 
   private void removeVertexAndPruneInternal(HashCode vertexId, Optional<HashCode> skip) {
     Optional.ofNullable(vertices.remove(vertexId))
-        .flatMap(
+        .ifPresent(
             removedVertex ->
-                Optional.ofNullable(vertexChildren.get(removedVertex.vertex().getParentVertexId())))
-        .ifPresent(siblings -> siblings.remove(vertexId));
+                vertexChildren.remove(removedVertex.vertex().getParentVertexId(), vertexId));
 
     executedVertices.remove(vertexId);
 
     if (this.rootVertex.hash().equals(vertexId)) {
       return;
     }
 
-    var children = vertexChildren.remove(vertexId);
-    if (children != null) {
-      for (HashCode child : children) {
-        if (!skip.map(child::equals).orElse(false)) {
-          removeVertexAndPruneInternal(child, Optional.empty());
-        }
+    final var children = vertexChildren.removeAll(vertexId);
+    for (HashCode child : children) {
+      if (!Optional.of(child).equals(skip)) {
+        removeVertexAndPruneInternal(child, Optional.empty());
       }
     }
   }
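
A further aside, not part of the diff: remove(key, value), used above to detach the removed vertex from its parent's entry, drops exactly one mapping, while removeAll(key) drops and returns every child of the key, giving back an empty set rather than null when the key is unknown, which is what makes the old null guard unnecessary. A compact sketch of a prune in that style, with String ids standing in for HashCode and an illustrative class name:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Optional;

public final class PruneSketch {
  // Illustrative stand-in for the real vertexChildren field: parent id -> child ids.
  private static final Multimap<String, String> vertexChildren = HashMultimap.create();

  static void prune(String vertexId, Optional<String> skip) {
    // removeAll never returns null (and returns a detached copy), so the loop
    // needs no guard for unknown ids and can safely recurse while mutating.
    for (String child : vertexChildren.removeAll(vertexId)) {
      if (!Optional.of(child).equals(skip)) {
        prune(child, Optional.empty());
      }
    }
  }

  public static void main(String[] args) {
    vertexChildren.put("root", "a");
    vertexChildren.put("root", "b");
    vertexChildren.put("a", "a1");

    vertexChildren.remove("root", "b");  // detach a single child, as in the diff above
    prune("a", Optional.empty());        // recursively drop a's subtree
    System.out.println(vertexChildren);  // {root=[a]}
  }
}
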
@@ -480,13 +475,8 @@ private VertexStoreState getState() {
 
   private void getChildrenVerticesList(
       VertexWithHash parent, ImmutableList.Builder<VertexWithHash> builder) {
-    Set<HashCode> childrenIds = this.vertexChildren.get(parent.hash());
-    if (childrenIds == null) {
-      return;
-    }
-
-    for (HashCode childId : childrenIds) {
-      final var v = vertices.get(childId);
+    for (HashCode child : this.vertexChildren.get(parent.hash())) {
+      final var v = vertices.get(child);
       builder.add(v);
       getChildrenVerticesList(v, builder);
     }
